-rwxr-xr-x  cron-jobs/update-web-db | 72
1 file changed, 46 insertions, 26 deletions
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
index 3f4a2fc..8760fc6 100755
--- a/cron-jobs/update-web-db
+++ b/cron-jobs/update-web-db
@@ -1,51 +1,71 @@
 #!/bin/bash
-# run at nice 5. it can churn quite a bit of cpu after all.
-renice +5 -p $$ > /dev/null
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
 
 # setup paths
 SPATH="/srv/http/archweb"
-ARCHES="i686 x86_64"
 
 # having "more important repos" last should make [core] trickle to the top of
 # the updates list each hour rather than being overwhelmed by big [extra] and
 # [community] updates
-REPOS="community-testing multilib-testing multilib community extra testing core"
+REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
 LOGOUT="/tmp/archweb_update.log"
 
 # figure out what operation to perform
 cmd="$(basename $0)"
 if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
-	echo "Invalid command name '$cmd' specified!"
-	exit 1
+	die "Invalid command name '$cmd' specified!"
 fi
 
+script_lock
+
+# run at nice 5. it can churn quite a bit of cpu after all.
+renice +5 -p $$ > /dev/null
+
 echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"
 
-## do the dirty, then output the result to a file.
-cd $SPATH
-for repo in ${REPOS}; do
-	for arch in ${ARCHES}; do
-		case "$cmd" in
-			update-web-db)
-				dbfile="/srv/ftp/${repo}/os/${arch}/${repo}.db.tar.gz"
-				flags="" ;;
-			update-web-files-db)
-				dbfile="/srv/ftp/${repo}/os/${arch}/${repo}.files.tar.gz"
-				flags="--filesonly" ;;
-		esac
-		if [[ -f $dbfile ]]; then
-			echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
-			./manage.py reporead ${flags} ${arch} ${dbfile} >> "${LOGOUT}" 2>&1
-			echo "" >> "${LOGOUT}"
-		fi
-	done
+case "$cmd" in
+	update-web-db)
+		dbfileext="${DBEXT}"
+		flags=""
+		;;
+	update-web-files-db)
+		dbfileext="${FILESEXT}"
+		flags="--filesonly"
+		;;
+esac
+
+# Lock the repos and get a copy of the db files to work on
+for repo in ${REPOS[@]}; do
+	for arch in ${ARCHES[@]}; do
+		repo_lock ${repo} ${arch} || exit 1
+		dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
+		if [ -f "${dbfile}" ]; then
+			mkdir -p "${WORKDIR}/${repo}/${arch}"
+			cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+		fi
+		repo_unlock ${repo} ${arch}
+	done
 done
 
+# Run reporead on our db copy
+pushd $SPATH >/dev/null
+for repo in ${REPOS[@]}; do
+	for arch in ${ARCHES[@]}; do
+		dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+		if [ -f "${dbcopy}" ]; then
+			echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
+			./manage.py reporead ${flags} ${arch} "${dbcopy}" >> "${LOGOUT}" 2>&1
+			echo "" >> "${LOGOUT}"
+		fi
+	done
+done
+popd >/dev/null
 echo "" >> "${LOGOUT}"
 
 # rotate the file if it is getting big (> 10M), overwriting any old backup
 if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
-	mv "${LOGOUT}" "${LOGOUT}.old"
+	mv "${LOGOUT}" "${LOGOUT}.old"
 fi
 
-# vim: set ts=4 sw=4 et ft=sh:
+script_unlock
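
The substance of the change is a lock-copy-unlock pattern: each repository database is copied into ${WORKDIR} while its repo lock is held, and reporead then runs against those snapshots, so no repo stays locked for the duration of the slow import. The script_lock, repo_lock, repo_unlock and script_unlock helpers come from the sourced db-functions file and are not part of this diff; the following is only a rough standalone sketch of the same idea using flock(1), with an assumed lock-file layout, timeout and paths rather than the real ones.

#!/bin/bash
# Rough illustration only -- NOT the db-functions implementation.
# The lock directory, timeout and paths below are assumptions for this sketch.

FTPBASE="/srv/ftp"          # same base path as the dbfile assignments above
WORKDIR="$(mktemp -d)"      # provided by the sourced scripts in the real cron job; faked here
LOCKDIR="/tmp/db-locks"     # hypothetical per-repo lock location
mkdir -p "${LOCKDIR}"

snapshot_db() {
	local repo=$1 arch=$2 ext=$3
	local dbfile="${FTPBASE}/${repo}/os/${arch}/${repo}${ext}"
	# hold the per-repo/arch lock only while copying, never while importing
	(
		flock -w 60 9 || exit 1
		if [ -f "${dbfile}" ]; then
			mkdir -p "${WORKDIR}/${repo}/${arch}"
			cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/"
		fi
	) 9> "${LOCKDIR}/${repo}-${arch}.lock"
}

snapshot_db extra x86_64 ".db.tar.gz"
# a slow consumer (reporead here) can now read the copy under ${WORKDIR}
# without blocking anyone who needs to update the live database

Keeping the lock held only for the cp means writers of the live database are blocked for the length of a file copy rather than the length of the reporead import, which is the point of the WORKDIR copy introduced by this commit.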