#!/bin/bash

. "$(dirname "$0")/../db-functions"
. "$(dirname "$0")/../config"

# setup paths
SPATH="/srv/http/archweb"

# having "more important repos" last should make [core] trickle to the top of
# the updates list each hour rather than being overwhelmed by big [extra] and
# [community] updates
REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
LOGOUT="/tmp/archweb_update.log"

# figure out which operation to perform from the name we were invoked as
cmd="$(basename "$0")"
if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
	die "Invalid command name '$cmd' specified!"
fi

script_lock

# run at nice 5. it can churn quite a bit of cpu, after all.
renice -n 5 -p $$ > /dev/null

echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"

case "$cmd" in
	update-web-db)
		dbfileext="${DBEXT}"
		flags=""
		;;
	update-web-files-db)
		dbfileext="${FILESEXT}"
		flags="--filesonly"
		;;
esac

# Lock the repos and take a copy of each db file to work on. The lock is held
# only for the copy, so the slow reporead run below never blocks the repos.
for repo in "${REPOS[@]}"; do
	for arch in "${ARCHES[@]}"; do
		repo_lock "${repo}" "${arch}" || exit 1
		dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
		if [[ -f ${dbfile} ]]; then
			mkdir -p "${WORKDIR}/${repo}/${arch}"
			cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
		fi
		repo_unlock "${repo}" "${arch}"
	done
done

# Run reporead on our db copies
pushd "$SPATH" >/dev/null
for repo in "${REPOS[@]}"; do
	for arch in "${ARCHES[@]}"; do
		dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
		if [[ -f ${dbcopy} ]]; then
			echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
			# ${flags} is deliberately unquoted: it is either empty or a
			# single option, and an empty quoted string would be passed to
			# reporead as a bogus argument
			./manage.py reporead ${flags} "${arch}" "${dbcopy}" >> "${LOGOUT}" 2>&1
			echo "" >> "${LOGOUT}"
		fi
	done
done
popd >/dev/null

echo "" >> "${LOGOUT}"

# rotate the log if it is getting big (> 10M), overwriting any old backup
if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
	mv "${LOGOUT}" "${LOGOUT}.old"
fi

script_unlock
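
# Deployment note (a sketch, not part of the script logic): the operation is
# chosen from "$(basename "$0")" above, so this file is expected to be
# installed under both accepted names, e.g. with one a symlink to the other:
#
#	ln -s update-web-db update-web-files-db
#
# The repo-ordering comment above assumes the script runs once an hour; a
# crontab entry for that (the install path here is hypothetical) might be:
#
#	0 * * * *	/srv/devtools/cron-jobs/update-web-db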