author     Joshua Ismael Haase Hernández <hahj87@gmail.com>    2011-04-16 14:22:45 -0500
committer  Joshua Ismael Haase Hernández <hahj87@gmail.com>    2011-04-16 14:22:45 -0500
commit     d3bd2a02d927dfe7b1766f869c4bc9319f472c42 (patch)
tree       89d5fafc1e841fe40b379b058caf7ece27c1febc
parent     bdc95e2d741b95172eaaf119fbb111a3d500809d (diff)

cron-jobs on stable too (xihh/stable)
-rwxr-xr-x  cron-jobs/ftpdir-cleanup        |  10
-rwxr-xr-x  cron-jobs/integrity-check       |  32
-rwxr-xr-x  cron-jobs/sourceballs           | 150
-rw-r--r--  cron-jobs/sourceballs.force     |   4
-rw-r--r--  cron-jobs/sourceballs.skip      |  29
-rwxr-xr-x  cron-jobs/update-web-db         |  78
l---------  cron-jobs/update-web-files-db   |   1
7 files changed, 302 insertions, 2 deletions
diff --git a/cron-jobs/ftpdir-cleanup b/cron-jobs/ftpdir-cleanup
index 09e8a49..bb1661a 100755
--- a/cron-jobs/ftpdir-cleanup
+++ b/cron-jobs/ftpdir-cleanup
@@ -10,9 +10,12 @@ clean_pkg() {
if ! ${CLEANUP_DRYRUN}; then
for pkg in "$@"; do
if [ -h "$pkg" ]; then
- rm -f "$pkg"
+ rm -f "$pkg" "$pkg.sig"
else
mv -f "$pkg" "$CLEANUP_DESTDIR"
+ if [ -e "$pkg.sig" ]; then
+ mv -f "$pkg.sig" "$CLEANUP_DESTDIR"
+ fi
touch "${CLEANUP_DESTDIR}/$(basename ${pkg})"
fi
done
@@ -100,7 +103,10 @@ if [ ${#old_pkgs[@]} -ge 1 ]; then
msg "Removing old packages from the cleanup directory..."
for old_pkg in ${old_pkgs[@]}; do
msg2 "${old_pkg}"
- ${CLEANUP_DRYRUN} || rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
+ if ! ${CLEANUP_DRYRUN}; then
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
+ rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig"
+ fi
done
fi
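
For reference, this is how clean_pkg() reads once the hunk above is applied; it is a reconstruction, assuming the surrounding lines of the function are unchanged, with CLEANUP_DRYRUN and CLEANUP_DESTDIR coming from the sourced config:

    clean_pkg() {
        if ! ${CLEANUP_DRYRUN}; then
            for pkg in "$@"; do
                if [ -h "$pkg" ]; then
                    # symlink: drop the link and its detached .sig in one go
                    rm -f "$pkg" "$pkg.sig"
                else
                    # regular file: park the package and its signature in the cleanup dir
                    mv -f "$pkg" "$CLEANUP_DESTDIR"
                    if [ -e "$pkg.sig" ]; then
                        mv -f "$pkg.sig" "$CLEANUP_DESTDIR"
                    fi
                    touch "${CLEANUP_DESTDIR}/$(basename ${pkg})"
                fi
            done
        fi
    }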
diff --git a/cron-jobs/integrity-check b/cron-jobs/integrity-check
new file mode 100755
index 0000000..d4f9694
--- /dev/null
+++ b/cron-jobs/integrity-check
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+dirname="$(dirname $0)"
+
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+
+script_lock
+
+if [ $# -ne 1 ]; then
+ die "usage: $(basename $0) <mailto>"
+fi
+mailto=$1
+
+check() {
+ ${dirname}/check_archlinux/check_packages.py \
+ --repos="${repos}" \
+ --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
+ --repo-dir="${FTP_BASE}" \
+ --arch="${arch}" \
+ 2>&1 | ${dirname}/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
+}
+
+repos='core,extra,community'
+arch='i686'
+check
+
+repos='core,extra,community,multilib'
+arch='x86_64'
+check
+
+script_unlock
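
The new integrity-check script takes a single <mailto> argument and pipes the output of check_packages.py for each repos/arch pair into devlist-mailer. A hypothetical crontab entry for scheduling it; the checkout path and the schedule are assumptions, only the <mailto> argument is required by the script itself:

    # Hypothetical: run the integrity check nightly at 03:00.
    0 3 * * * /srv/repo/dbscripts/cron-jobs/integrity-check dev-list@example.org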
diff --git a/cron-jobs/sourceballs b/cron-jobs/sourceballs
new file mode 100755
index 0000000..ee074bd
--- /dev/null
+++ b/cron-jobs/sourceballs
@@ -0,0 +1,150 @@
+#!/bin/bash
+
+dirname="$(dirname $(readlink -e $0))"
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ done
+done
+
+# Adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a readable file for each repo with the following format
+# <pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ # Repo does not exist; skip it
+ if [ ! -f "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ bsdtar -xOf "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
+ | awk '/^%NAME%/ { getline b };
+ /^%BASE%/ { getline b };
+ /^%VERSION%/ { getline v };
+ /^%LICENSE%/,/^$/ {
+ if ( !/^%LICENSE%/ ) { l=l" "$0 }
+ };
+ /^%ARCH%/ {
+ getline a;
+ printf "%s %s %s %s\n", b, v, a, l;
+ l="";
+ }'
+ done | sort -u > "${WORKDIR}/db-${repo}"
+done
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+# Create a list of all available source package file names
+find "${ARCH_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+# For each package, check whether a source package needs to be built
+for repo in ${PKGREPOS[@]}; do
+ newpkgs=()
+ failedpkgs=()
+ while read line; do
+ pkginfo=(${line})
+ pkgbase=${pkginfo[0]}
+ pkgver=${pkginfo[1]}
+ pkgarch=${pkginfo[2]}
+ pkglicense=(${pkginfo[@]:3})
+
+ # Should this package be skipped?
+ if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
+ continue
+ fi
+ # Commenting out, we'll sourceball everything
+ # Check if the license or .force file does not enforce creating a source package
+# if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+# continue
+# fi
+ # Store the expected file name of the source package
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
+
+ # Build the source package if it's not already there
+ if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then
+ # Check if we had failed before
+ if in_array "${pkgbase}-${pkgver}${SRCEXT}" ${failedpkgs[@]}; then
+ continue
+ fi
+
+ # Get the sources from svn
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
+ #svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
+ # "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+
+ # Try the official repos first, then [libre], then [libre-testing]
+ cp -r "${SVNREPO}/$repo/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre-testing/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1
+ if [ $? -ge 1 ]; then
+ failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ continue
+ fi
+
+ # Build the actual source package
+ pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
+ makepkg --nocolor --allsource --ignorearch # >/dev/null 2>&1
+ if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
+ mv "${pkgbase}-${pkgver}${SRCEXT}" "${ARCH_BASE}/${SRCPOOL}"
+ # Avoid creating the same source package for every arch
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
+ newpkgs[${#newpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ else
+ failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ fi
+ popd >/dev/null
+ fi
+ done < "${WORKDIR}/db-${repo}"
+
+ if [ ${#newpkgs[@]} -ge 1 ]; then
+ msg "Adding source packages for [${repo}]..."
+ for new_pkg in ${newpkgs[@]}; do
+ msg2 "${new_pkg}"
+ done
+ fi
+ if [ ${#failedpkgs[@]} -ge 1 ]; then
+ msg "Failed to create source packages for [${repo}]..."
+ for failed_pkg in ${failedpkgs[@]}; do
+ msg2 "${failed_pkg}"
+ done
+ fi
+done
+
+# Cleanup old source packages
+cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ ${SOURCE_CLEANUP_DRYRUN} && warning 'dry run mode is active'
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ if ! ${SOURCE_CLEANUP_DRYRUN}; then
+ mv "$ARCH_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find ${SOURCE_CLEANUP_DESTDIR} -type f -name "*${SRCEXT}" -mtime +${SOURCE_CLEANUP_KEEP} -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ ${SOURCE_CLEANUP_DRYRUN} || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+script_unlock
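
The awk pipeline above flattens each repo database into one line per package, in the format "<pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]", and stale sourceballs are then found by comparing the sorted expected and available lists with comm -23. A small illustration with made-up package names, assuming SRCEXT is .src.tar.gz:

    # One db-<repo> line per package, e.g. (made-up entry):
    #   pacman 3.5.2-1 x86_64 GPL
    # Expected vs. available sourceballs:
    printf '%s\n' pacman-3.5.2-1.src.tar.gz | sort -u > expected-src-pkgs.sort
    printf '%s\n' pacman-3.5.1-1.src.tar.gz pacman-3.5.2-1.src.tar.gz | sort -u > available-src-pkgs.sort
    # comm -23 keeps lines unique to the first file: the old sourceball to remove.
    comm -23 available-src-pkgs.sort expected-src-pkgs.sort    # prints pacman-3.5.1-1.src.tar.gz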
diff --git a/cron-jobs/sourceballs.force b/cron-jobs/sourceballs.force
new file mode 100644
index 0000000..badf15d
--- /dev/null
+++ b/cron-jobs/sourceballs.force
@@ -0,0 +1,4 @@
+faad2
+wxgtk
+wxpython
+glhack
diff --git a/cron-jobs/sourceballs.skip b/cron-jobs/sourceballs.skip
new file mode 100644
index 0000000..14d6f4b
--- /dev/null
+++ b/cron-jobs/sourceballs.skip
@@ -0,0 +1,29 @@
+nexuiz-data
+torcs-data
+tremulous-data
+ufoai-data
+frogatto-data
+vdrift-data
+naev-data
+btanks-data
+wesnoth-data
+texlive-bin
+texlive-bibtexextra
+texlive-core
+texlive-fontsextra
+texlive-formatsextra
+texlive-games
+texlive-genericextra
+texlive-htmlxml
+texlive-humanities
+texlive-langcjk
+texlive-langcyrillic
+texlive-langextra
+texlive-langgreek
+texlive-latexextra
+texlive-music
+texlive-pictures
+texlive-plainextra
+texlive-pstricks
+texlive-publishers
+texlive-science
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
new file mode 100755
index 0000000..6ced4c1
--- /dev/null
+++ b/cron-jobs/update-web-db
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
+
+# setup paths
+SPATH="/srv/http/archweb"
+ENVPATH="/srv/http/archweb-env/bin/activate"
+
+# having "more important repos" last should make [core] trickle to the top of
+# the updates list each hour rather than being overwhelmed by big [extra] and
+# [community] updates
+REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
+LOGOUT="/tmp/archweb_update.log"
+
+# figure out what operation to perform
+cmd="$(basename $0)"
+if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
+ die "Invalid command name '$cmd' specified!"
+fi
+
+script_lock
+
+# run at nice 5. it can churn quite a bit of cpu after all.
+renice +5 -p $$ > /dev/null
+
+echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"
+
+# source our virtualenv if it exists
+if [ -f "$ENVPATH" ]; then
+ . "$ENVPATH"
+fi
+
+case "$cmd" in
+ update-web-db)
+ dbfileext="${DBEXT}"
+ flags=""
+ ;;
+ update-web-files-db)
+ dbfileext="${FILESEXT}"
+ flags="--filesonly"
+ ;;
+esac
+
+# Lock the repos and get a copy of the db files to work on
+for repo in ${REPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
+ if [ -f "${dbfile}" ]; then
+ mkdir -p "${WORKDIR}/${repo}/${arch}"
+ cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+ fi
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+# Run reporead on our db copy
+pushd $SPATH >/dev/null
+for repo in ${REPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+ if [ -f "${dbcopy}" ]; then
+ echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
+ ./manage.py reporead ${flags} ${arch} "${dbcopy}" >> "${LOGOUT}" 2>&1
+ echo "" >> "${LOGOUT}"
+ fi
+ done
+done
+popd >/dev/null
+echo "" >> "${LOGOUT}"
+
+# rotate the file if it is getting big (> 10M), overwriting any old backup
+if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
+ mv "${LOGOUT}" "${LOGOUT}.old"
+fi
+
+script_unlock
diff --git a/cron-jobs/update-web-files-db b/cron-jobs/update-web-files-db
new file mode 120000
index 0000000..0c2c4fa
--- /dev/null
+++ b/cron-jobs/update-web-files-db
@@ -0,0 +1 @@
+update-web-db
\ No newline at end of file
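
update-web-files-db is just a symlink back to update-web-db: the script selects its mode from the name it was invoked under, via basename "$0" and the case statement above. A minimal sketch of that dispatch pattern; the .db.tar.gz and .files.tar.gz extensions are assumed stand-ins for DBEXT and FILESEXT from the config:

    #!/bin/bash
    # Name-based dispatch: one file, two behaviours depending on the invoked name.
    cmd="$(basename "$0")"
    case "$cmd" in
        update-web-db)       dbfileext=".db.tar.gz";    flags="" ;;
        update-web-files-db) dbfileext=".files.tar.gz"; flags="--filesonly" ;;
        *) echo "Invalid command name '$cmd' specified!" >&2; exit 1 ;;
    esac
    echo "mode=$cmd db extension=$dbfileext reporead flags=${flags:-none}"

Invoked through the update-web-db name it copies and reads the package databases; invoked through the symlink it reads the .files databases and passes --filesonly to reporead.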