path: root/scripts/makepkg.sh.in
author     Allan McRae <allan@archlinux.org>    2015-05-17 16:02:17 +0200
committer  Allan McRae <allan@archlinux.org>    2015-05-19 15:43:00 +0200
commit     3d4529335c598e79b5a483fedc4c9d5c12ef10f1 (patch)
tree       f965489bcea98a5461f064584fffb16d559e7e84  /scripts/makepkg.sh.in
parent     1a17249159d2425dfd5103b8699673f72394a385 (diff)
download   pacman-3d4529335c598e79b5a483fedc4c9d5c12ef10f1.tar.gz
           pacman-3d4529335c598e79b5a483fedc4c9d5c12ef10f1.tar.xz
libmakepkg: extract functions for source download and extraction
Signed-off-by: Allan McRae <allan@archlinux.org>
Diffstat (limited to 'scripts/makepkg.sh.in')
-rw-r--r--  scripts/makepkg.sh.in  524
1 file changed, 0 insertions(+), 524 deletions(-)
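
Note: the hunks below only remove code from makepkg.sh.in; per the commit message, the same download_*()/extract_*() helpers are extracted into libmakepkg (the additions are not shown because this view is limited to scripts/makepkg.sh.in). As a rough sketch of how makepkg is expected to pick the helpers up afterwards — the $LIBRARY default and the "source every module" loop here are illustrative assumptions, not taken from this commit:

    # Assumed libmakepkg bootstrap in makepkg.sh.in (illustrative only).
    # /usr/share/makepkg is a typical install location, assumed here.
    LIBRARY=${LIBRARY:-'/usr/share/makepkg'}

    # Source every libmakepkg module, which would include the extracted
    # source download/extraction helpers (download_file, extract_git, ...).
    for lib in "$LIBRARY"/*.sh; do
        source "$lib"
    done

With the helpers sourced this way, makepkg's main build flow can presumably keep calling download_sources and extract_sources under the same names, so the call sites that remain in makepkg.sh.in need no changes.
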
diff --git a/scripts/makepkg.sh.in b/scripts/makepkg.sh.in
index 140bb1a2..c74e84a8 100644
--- a/scripts/makepkg.sh.in
+++ b/scripts/makepkg.sh.in
@@ -175,502 +175,6 @@ enter_fakeroot() {
fakeroot -- $0 -F "${ARGLIST[@]}" || exit $?
}
-download_local() {
- local netfile=$1
- local filepath=$(get_filepath "$netfile")
-
- if [[ -n "$filepath" ]]; then
- msg2 "$(gettext "Found %s")" "${filepath##*/}"
- else
- local filename=$(get_filename "$netfile")
- error "$(gettext "%s was not found in the build directory and is not a URL.")" "$filename"
- exit 1 # $E_MISSING_FILE
- fi
-}
-
-download_file() {
- local netfile=$1
-
- local filepath=$(get_filepath "$netfile")
- if [[ -n "$filepath" ]]; then
- msg2 "$(gettext "Found %s")" "${filepath##*/}"
- return
- fi
-
- local proto=$(get_protocol "$netfile")
-
- # find the client we should use for this URL
- local -a cmdline
- IFS=' ' read -a cmdline < <(get_downloadclient "$proto")
- (( ${#cmdline[@]} )) || exit
-
- local filename=$(get_filename "$netfile")
- local url=$(get_url "$netfile")
-
- if [[ $proto = "scp" ]]; then
- # scp downloads should not pass the protocol in the url
- url="${url##*://}"
- fi
-
- msg2 "$(gettext "Downloading %s...")" "$filename"
-
- # temporary download file, default to last component of the URL
- local dlfile="${url##*/}"
-
- # replace %o by the temporary dlfile if it exists
- if [[ ${cmdline[*]} = *%o* ]]; then
- dlfile=$filename.part
- cmdline=("${cmdline[@]//%o/$dlfile}")
- fi
- # add the URL, either in place of %u or at the end
- if [[ ${cmdline[*]} = *%u* ]]; then
- cmdline=("${cmdline[@]//%u/$url}")
- else
- cmdline+=("$url")
- fi
-
- if ! command -- "${cmdline[@]}" >&2; then
- [[ ! -s $dlfile ]] && rm -f -- "$dlfile"
- error "$(gettext "Failure while downloading %s")" "$filename"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
-
- # rename the temporary download file to the final destination
- if [[ $dlfile != "$filename" ]]; then
- mv -f "$SRCDEST/$dlfile" "$SRCDEST/$filename"
- fi
-}
-
-extract_file() {
- local file=$1
-
- local filepath=$(get_filepath "$file")
- rm -f "$srcdir/${file}"
- ln -s "$filepath" "$srcdir/"
-
- if in_array "$file" "${noextract[@]}"; then
- # skip source files in the noextract=() array
- # these are marked explicitly to NOT be extracted
- return 0
- fi
-
- # do not rely on extension for file type
- local file_type=$(file -bizL "$file")
- local ext=${file##*.}
- local cmd=''
- case "$file_type" in
- *application/x-tar*|*application/zip*|*application/x-zip*|*application/x-cpio*)
- cmd="bsdtar" ;;
- *application/x-gzip*)
- case "$ext" in
- gz|z|Z) cmd="gzip" ;;
- *) return;;
- esac ;;
- *application/x-bzip*)
- case "$ext" in
- bz2|bz) cmd="bzip2" ;;
- *) return;;
- esac ;;
- *application/x-xz*)
- case "$ext" in
- xz) cmd="xz" ;;
- *) return;;
- esac ;;
- *)
- # See if bsdtar can recognize the file
- if bsdtar -tf "$file" -q '*' &>/dev/null; then
- cmd="bsdtar"
- else
- return 0
- fi ;;
- esac
-
- local ret=0
- msg2 "$(gettext "Extracting %s with %s")" "$file" "$cmd"
- if [[ $cmd = "bsdtar" ]]; then
- $cmd -xf "$file" || ret=$?
- else
- rm -f -- "${file%.*}"
- $cmd -dcf "$file" > "${file%.*}" || ret=$?
- fi
- if (( ret )); then
- error "$(gettext "Failed to extract %s")" "$file"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
-
- if (( EUID == 0 )); then
- # change perms of all source files to root user & root group
- chown -R 0:0 "$srcdir"
- fi
-}
-
-download_bzr() {
- local netfile=$1
-
- local url=$(get_url "$netfile")
- if [[ $url != bzr+ssh* ]]; then
- url=${url#bzr+}
- fi
- url=${url%%#*}
-
- local repo=$(get_filename "$netfile")
- local displaylocation="$url"
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
- msg2 "$(gettext "Branching %s ...")" "${displaylocation}"
- if ! bzr branch "$url" "$dir" --no-tree --use-existing-dir; then
- error "$(gettext "Failure while branching %s")" "${displaylocation}"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif (( ! HOLDVER )); then
- msg2 "$(gettext "Pulling %s ...")" "${displaylocation}"
- cd_safe "$dir"
- if ! bzr pull "$url"; then
- # only warn on failure to allow offline builds
- warning "$(gettext "Failure while pulling %s")" "${displaylocation}"
- fi
- fi
-}
-
-extract_bzr() {
- local netfile=$1
-
- local repo=$(get_filename "$netfile")
- local fragment=${netfile#*#}
- if [[ $fragment = "$netfile" ]]; then
- unset fragment
- fi
-
- rev="last:1"
- if [[ -n $fragment ]]; then
- case ${fragment%%=*} in
- revision)
- rev="${fragment#*=}"
- displaylocation="$url -r ${fragment#*=}"
- ;;
- *)
- error "$(gettext "Unrecognized reference: %s")" "${fragment}"
- plain "$(gettext "Aborting...")"
- exit 1
- esac
- fi
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- msg2 "$(gettext "Creating working copy of %s %s repo...")" "${repo}" "bzr"
- pushd "$srcdir" &>/dev/null
-
- if [[ -d "${dir##*/}" ]]; then
- cd_safe "${dir##*/}"
- if ! (bzr pull "$dir" -q --overwrite -r "$rev" && bzr clean-tree -q --detritus --force); then
- error "$(gettext "Failure while updating working copy of %s %s repo")" "${repo}" "bzr"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif ! bzr checkout "$dir" -r "$rev"; then
- error "$(gettext "Failure while creating working copy of %s %s repo")" "${repo}" "bzr"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
-
- popd &>/dev/null
-}
-
-download_git() {
- local netfile=$1
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- local repo=$(get_filename "$netfile")
-
- local url=$(get_url "$netfile")
- url=${url#git+}
- url=${url%%#*}
-
- if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
- msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "git"
- if ! git clone --mirror "$url" "$dir"; then
- error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "git"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif (( ! HOLDVER )); then
- cd_safe "$dir"
- # Make sure we are fetching the right repo
- if [[ "$url" != "$(git config --get remote.origin.url)" ]] ; then
- error "$(gettext "%s is not a clone of %s")" "$dir" "$url"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "git"
- if ! git fetch --all -p; then
- # only warn on failure to allow offline builds
- warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "git"
- fi
- fi
-}
-
-extract_git() {
- local netfile=$1
-
- local fragment=${netfile#*#}
- if [[ $fragment = "$netfile" ]]; then
- unset fragment
- fi
-
- local repo=${netfile##*/}
- repo=${repo%%#*}
- repo=${repo%%.git*}
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- msg2 "$(gettext "Creating working copy of %s %s repo...")" "${repo}" "git"
- pushd "$srcdir" &>/dev/null
-
- local updating=0
- if [[ -d "${dir##*/}" ]]; then
- updating=1
- cd_safe "${dir##*/}"
- if ! git fetch; then
- error "$(gettext "Failure while updating working copy of %s %s repo")" "${repo}" "git"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- cd_safe "$srcdir"
- elif ! git clone "$dir" "${dir##*/}"; then
- error "$(gettext "Failure while creating working copy of %s %s repo")" "${repo}" "git"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
-
- cd_safe "${dir##*/}"
-
- local ref=origin/HEAD
- if [[ -n $fragment ]]; then
- case ${fragment%%=*} in
- commit|tag)
- ref=${fragment##*=}
- ;;
- branch)
- ref=origin/${fragment##*=}
- ;;
- *)
- error "$(gettext "Unrecognized reference: %s")" "${fragment}"
- plain "$(gettext "Aborting...")"
- exit 1
- esac
- fi
-
- if [[ $ref != "origin/HEAD" ]] || (( updating )) ; then
- if ! git checkout --force --no-track -B makepkg $ref; then
- error "$(gettext "Failure while creating working copy of %s %s repo")" "${repo}" "git"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- fi
-
- popd &>/dev/null
-}
-
-download_hg() {
- local netfile=$1
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- local repo=$(get_filename "$netfile")
-
- local url=$(get_url "$netfile")
- url=${url#hg+}
- url=${url%%#*}
-
- if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
- msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "hg"
- if ! hg clone -U "$url" "$dir"; then
- error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "hg"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif (( ! HOLDVER )); then
- msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "hg"
- cd_safe "$dir"
- if ! hg pull; then
- # only warn on failure to allow offline builds
- warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "hg"
- fi
- fi
-}
-
-extract_hg() {
- local netfile=$1
-
- local fragment=${netfile#*#}
- if [[ $fragment = "$netfile" ]]; then
- unset fragment
- fi
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- local repo=${netfile##*/}
- repo=${repo%%#*}
-
- msg2 "$(gettext "Creating working copy of %s %s repo...")" "${repo}" "hg"
- pushd "$srcdir" &>/dev/null
-
- local ref=tip
- if [[ -n $fragment ]]; then
- case ${fragment%%=*} in
- branch|revision|tag)
- ref="${fragment##*=}"
- ;;
- *)
- error "$(gettext "Unrecognized reference: %s")" "${fragment}"
- plain "$(gettext "Aborting...")"
- exit 1
- esac
- fi
-
- if [[ -d "${dir##*/}" ]]; then
- cd_safe "${dir##*/}"
- if ! (hg pull && hg update -C -r "$ref"); then
- error "$(gettext "Failure while updating working copy of %s %s repo")" "${repo}" "hg"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif ! hg clone -u "$ref" "$dir" "${dir##*/}"; then
- error "$(gettext "Failure while creating working copy of %s %s repo")" "${repo}" "hg"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
-
- popd &>/dev/null
-}
-
-download_svn() {
- local netfile=$1
-
- local fragment=${netfile#*#}
- if [[ $fragment = "$netfile" ]]; then
- unset fragment
- fi
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- local repo=$(get_filename "$netfile")
-
- local url=$(get_url "$netfile")
- if [[ $url != svn+ssh* ]]; then
- url=${url#svn+}
- fi
- url=${url%%#*}
-
- local ref=HEAD
- if [[ -n $fragment ]]; then
- case ${fragment%%=*} in
- revision)
- ref="${fragment##*=}"
- ;;
- *)
- error "$(gettext "Unrecognized reference: %s")" "${fragment}"
- plain "$(gettext "Aborting...")"
- exit 1
- esac
- fi
-
- if [[ ! -d "$dir" ]] || dir_is_empty "$dir" ; then
- msg2 "$(gettext "Cloning %s %s repo...")" "${repo}" "svn"
- mkdir -p "$dir/.makepkg"
- if ! svn checkout -r ${ref} --config-dir "$dir/.makepkg" "$url" "$dir"; then
- error "$(gettext "Failure while downloading %s %s repo")" "${repo}" "svn"
- plain "$(gettext "Aborting...")"
- exit 1
- fi
- elif (( ! HOLDVER )); then
- msg2 "$(gettext "Updating %s %s repo...")" "${repo}" "svn"
- cd_safe "$dir"
- if ! svn update -r ${ref}; then
- # only warn on failure to allow offline builds
- warning "$(gettext "Failure while updating %s %s repo")" "${repo}" "svn"
- fi
- fi
-}
-
-extract_svn() {
- local netfile=$1
-
- local dir=$(get_filepath "$netfile")
- [[ -z "$dir" ]] && dir="$SRCDEST/$(get_filename "$netfile")"
-
- local repo=${netfile##*/}
- repo=${repo%%#*}
-
- msg2 "$(gettext "Creating working copy of %s %s repo...")" "${repo}" "svn"
-
- cp -au "$dir" "$srcdir"
-}
-
-download_sources() {
- local netfile all_sources
- local get_source_fn=get_all_sources_for_arch get_vcs=1
-
- msg "$(gettext "Retrieving sources...")"
-
- while true; do
- case $1 in
- allarch)
- get_source_fn=get_all_sources
- ;;
- novcs)
- get_vcs=0
- ;;
- *)
- break 2
- ;;
- esac
- shift
- done
-
- "$get_source_fn" 'all_sources'
- for netfile in "${all_sources[@]}"; do
- pushd "$SRCDEST" &>/dev/null
-
- local proto=$(get_protocol "$netfile")
- case "$proto" in
- local)
- download_local "$netfile"
- ;;
- bzr*)
- (( get_vcs )) && download_bzr "$netfile"
- ;;
- git*)
- (( get_vcs )) && download_git "$netfile"
- ;;
- hg*)
- (( get_vcs )) && download_hg "$netfile"
- ;;
- svn*)
- (( get_vcs )) && download_svn "$netfile"
- ;;
- *)
- download_file "$netfile"
- ;;
- esac
-
- popd &>/dev/null
- done
-}
-
# Automatically update pkgver variable if a pkgver() function is provided
# Re-sources the PKGBUILD afterwards to allow for other variables that use $pkgver
update_pkgver() {
@@ -1226,34 +730,6 @@ check_source_integrity() {
fi
}
-extract_sources() {
- msg "$(gettext "Extracting sources...")"
- local netfile all_sources
-
- get_all_sources_for_arch 'all_sources'
- for netfile in "${all_sources[@]}"; do
- local file=$(get_filename "$netfile")
- local proto=$(get_protocol "$netfile")
- case "$proto" in
- bzr*)
- extract_bzr "$netfile"
- ;;
- git*)
- extract_git "$netfile"
- ;;
- hg*)
- extract_hg "$netfile"
- ;;
- svn*)
- extract_svn "$netfile"
- ;;
- *)
- extract_file "$file"
- ;;
- esac
- done
-}
-
error_function() {
if [[ -p $logpipe ]]; then
rm "$logpipe"