summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorJoshua Ismael Haase Hernández <hahj87@gmail.com>2011-04-16 00:44:41 -0500
committerJoshua Ismael Haase Hernández <hahj87@gmail.com>2011-04-16 00:44:41 -0500
commit7203e2e2631e29a79f4ea822c371d4e12c9c1a88 (patch)
treef0ada9061babb0f656afa90e438bba0f0f781241
parentdeab65fad4ced009fb31f7033b1db8ef0af78aee (diff)
parent229a9c504cbd733c93cf91399dc54bedf5160cc5 (diff)
Merge branch 'master' of ~/packages/dbscripts into pruebalukeshu/2011-dbscripts-repo-maintainer-merge
Conflicts: .gitignore db-functions
-rw-r--r--.gitignore2
-rw-r--r--config29
-rw-r--r--cron-jobs/check_archlinux/README8
-rw-r--r--cron-jobs/check_archlinux/alpm.c40
-rwxr-xr-xcron-jobs/check_archlinux/alpm.sobin0 -> 7166 bytes
-rwxr-xr-xcron-jobs/check_archlinux/check_packages.py494
-rwxr-xr-xcron-jobs/check_archlinux/parse_pkgbuilds.sh149
-rw-r--r--cron-jobs/check_archlinux/setup.py10
-rwxr-xr-xcron-jobs/create-filelists101
-rwxr-xr-xcron-jobs/devlist-mailer28
-rwxr-xr-xcron-jobs/ftpdir-cleanup113
-rwxr-xr-xcron-jobs/integrity-check32
-rwxr-xr-xcron-jobs/sourceballs150
-rw-r--r--cron-jobs/sourceballs.force4
-rw-r--r--cron-jobs/sourceballs.skip29
-rwxr-xr-xcron-jobs/sourceballs287
-rwxr-xr-xcron-jobs/update-web-db78
l---------cron-jobs/update-web-files-db1
-rwxr-xr-xdb-move117
-rwxr-xr-xdb-remove46
-rwxr-xr-xdb-repo-add39
-rwxr-xr-xdb-repo-remove36
-rwxr-xr-xdb-update76
-rw-r--r--test/lib/common.inc191
-rw-r--r--test/lib/shunit21116
-rw-r--r--test/packages/pkg-any-a/PKGBUILD12
-rw-r--r--test/packages/pkg-any-b/PKGBUILD12
l---------test/packages/pkg-simple-a/Makefile1
-rw-r--r--test/packages/pkg-simple-a/PKGBUILD22
l---------test/packages/pkg-simple-a/test.c1
l---------test/packages/pkg-simple-b/Makefile1
-rw-r--r--test/packages/pkg-simple-b/PKGBUILD22
l---------test/packages/pkg-simple-b/test.c1
l---------test/packages/pkg-split-a/Makefile1
-rw-r--r--test/packages/pkg-split-a/PKGBUILD28
l---------test/packages/pkg-split-a/test.c1
l---------test/packages/pkg-split-b/Makefile1
-rw-r--r--test/packages/pkg-split-b/PKGBUILD29
l---------test/packages/pkg-split-b/test.c1
-rwxr-xr-xtest/runTest15
-rw-r--r--test/src/Makefile5
-rw-r--r--test/src/test.c7
-rwxr-xr-xtest/test.d/create-filelists.sh110
-rwxr-xr-xtest/test.d/db-move.sh76
-rwxr-xr-xtest/test.d/db-remove.sh52
-rwxr-xr-xtest/test.d/db-update.sh159
-rwxr-xr-xtest/test.d/ftpdir-cleanup.sh93
-rwxr-xr-xtest/test.d/packages.sh11
-rwxr-xr-xtest/test.d/pool-transition.sh152
-rwxr-xr-xtest/test.d/sourceballs.sh84
-rwxr-xr-xtest/test.d/testing2x.sh27
-rwxr-xr-xtesting2x61
52 files changed, 3961 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
index 69dec40..869c26c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
*~
*.pyc
local_config
+/config.local
+test/packages/*/*.pkg.tar.*
diff --git a/config b/config
new file mode 100644
index 0000000..217b627
--- /dev/null
+++ b/config
@@ -0,0 +1,29 @@
+FTP_BASE="/home/parabolavnx/parabolagnulinux.org/free"
+ARCH_BASE="/home/parabolavnx/parabolagnulinux.org/repo"
+SVNREPO="/home/parabolavnx/parabolagnulinux.org/abslibre"
+PKGREPOS=('core' 'extra' 'community' 'libre' 'libre-testing' 'social' 'sugar' 'testing')
+PKGPOOL='pool/packages'
+SRCPOOL='sources/packages'
+
+CLEANUP_DESTDIR="$FTP_BASE/old/packages"
+CLEANUP_DRYRUN=false
+# Time in days to keep moved packages
+CLEANUP_KEEP=30
+
+SOURCE_CLEANUP_DESTDIR="$FTP_BASE/old/sources"
+SOURCE_CLEANUP_DRYRUN=false
+# Time in days to keep moved sourcepackages
+SOURCE_CLEANUP_KEEP=30
+
+LOCK_DELAY=10
+LOCK_TIMEOUT=300
+
+STAGING="$FTP_BASE/staging"
+TMPDIR="$HOME/tmp"
+ARCHES=(i686 x86_64 mips64el)
+DBEXT=".db.tar.gz"
+FILESEXT=".files.tar.gz"
+PKGEXT=".pkg.tar.*"
+SRCEXT=".src.tar.gz"
+
+MAKEPKGCONF="$HOME/etc/makepkg.conf"
diff --git a/cron-jobs/check_archlinux/README b/cron-jobs/check_archlinux/README
new file mode 100644
index 0000000..8f178dc
--- /dev/null
+++ b/cron-jobs/check_archlinux/README
@@ -0,0 +1,8 @@
+1) Build the python module
+$ python2 setup.py build
+
+2) copy it back to the current working directory
+$ cp build/lib.*/alpm.* .
+
+3) run the script
+$ ./check_packages.py -h
diff --git a/cron-jobs/check_archlinux/alpm.c b/cron-jobs/check_archlinux/alpm.c
new file mode 100644
index 0000000..0b7cd2c
--- /dev/null
+++ b/cron-jobs/check_archlinux/alpm.c
@@ -0,0 +1,40 @@
+#include <Python.h>
+#include <alpm.h>
+
+static PyObject *
+alpm_vercmp(PyObject *self, PyObject *args)
+{
+ const char *v1, *v2;
+ int ret;
+
+ if (!PyArg_ParseTuple(args, "ss", &v1, &v2))
+ return NULL;
+ ret = alpm_pkg_vercmp(v1, v2);
+ return Py_BuildValue("i", ret);
+}
+
+static PyMethodDef AlpmMethods[] = {
+ {"vercmp", alpm_vercmp, METH_VARARGS,
+ "Execute vercmp."},
+ {NULL, NULL, 0, NULL} /* Sentinel */
+};
+
+PyMODINIT_FUNC
+initalpm(void)
+{
+ (void) Py_InitModule("alpm", AlpmMethods);
+}
+
+int
+main(int argc, char *argv[])
+{
+ /* Pass argv[0] to the Python interpreter */
+ Py_SetProgramName(argv[0]);
+
+ /* Initialize the Python interpreter. Required. */
+ Py_Initialize();
+
+ /* Add a static module */
+ initalpm();
+ return 0;
+}
diff --git a/cron-jobs/check_archlinux/alpm.so b/cron-jobs/check_archlinux/alpm.so
new file mode 100755
index 0000000..50f0f97
--- /dev/null
+++ b/cron-jobs/check_archlinux/alpm.so
Binary files differ
diff --git a/cron-jobs/check_archlinux/check_packages.py b/cron-jobs/check_archlinux/check_packages.py
new file mode 100755
index 0000000..0a2b10e
--- /dev/null
+++ b/cron-jobs/check_archlinux/check_packages.py
@@ -0,0 +1,494 @@
+#!/usr/bin/python2
+#
+# check_archlinux.py
+#
+# Original script by Scott Horowitz <stonecrest@gmail.com>
+# Rewritten by Xavier Chantry <shiningxc@gmail.com>
+#
+# This script currently checks for a number of issues in your ABS tree:
+# 1. Directories with missing PKGBUILDS
+# 2. Invalid PKGBUILDs (bash syntax error for instance)
+# 3. PKGBUILD names that don't match their directory
+# 4. Duplicate PKGBUILDs
+# 5. Valid arch's in PKGBUILDS
+# 6. Missing (make-)dependencies
+# 7. Hierarchy of repos (e.g., that a core package doesn't depend on
+# a non-core package)
+# 8. Circular dependencies
+
+import os,re,commands,getopt,sys,tarfile,alpm
+import pdb
+
+DBEXT='.db.tar.gz'
+
+packages = {} # pkgname : PacmanPackage
+repopkgs = {} # pkgname : PacmanPackage
+provisions = {} # provision : PacmanPackage
+pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies
+invalid_pkgbuilds = []
+missing_pkgbuilds = []
+dups = []
+
+dbonly = []
+absonly = []
+
+mismatches = []
+missing_deps = []
+missing_makedeps = []
+invalid_archs = []
+dep_hierarchy = []
+makedep_hierarchy = []
+circular_deps = [] # pkgname>dep1>dep2>...>pkgname
+checked_deps = []
+
+class PacmanPackage:
+ def __init__(self):
+ self.name,self.version = "",""
+ self.base = ""
+ self.path,self.repo = "",""
+ self.deps,self.makedeps = [],[]
+ self.provides,self.conflicts = [],[]
+ self.archs = []
+
+class Depend:
+ def __init__(self,name,version,mod):
+ self.name = name
+ self.version = version
+ self.mod = mod
+
+def parse_pkgbuilds(repos,arch):
+ for absroot in absroots:
+ for repo in repos:
+ cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh '
+ cmd += arch + ' ' + absroot + '/' + repo
+ (status,output) = commands.getstatusoutput(cmd)
+ if status != 0:
+ print "Error : failed to run '%s'" % cmd
+ sys.exit()
+ parse_data(repo,output)
+
+def parse_data(repo,data):
+ attrname = None
+
+ for line in data.split('\n'):
+ if line.startswith('%'):
+ attrname = line.strip('%').lower()
+ elif line.strip() == '':
+ attrname = None
+ elif attrname == "invalid":
+ if repo in repos:
+ invalid_pkgbuilds.append(line)
+ elif attrname == "missing":
+ if repo in repos:
+ missing_pkgbuilds.append(line)
+ elif attrname == "name":
+ pkg = PacmanPackage()
+ pkg.name = line
+ pkg.repo = repo
+ dup = None
+ if pkg.name in packages:
+ dup = packages[pkg.name]
+ else:
+ packages[pkg.name] = pkg
+ elif attrname == "base":
+ pkg.base = line
+ elif attrname == "version":
+ pkg.version = line
+ elif attrname == "path":
+ pkg.path = line
+ if dup != None and (pkg.repo in repos or dup.repo in repos):
+ dups.append(pkg.path + " vs. " + dup.path)
+ elif attrname == "arch":
+ pkg.archs.append(line)
+ elif attrname == "depends":
+ pkg.deps.append(line)
+ elif attrname == "makedepends":
+ pkg.makedeps.append(line)
+ elif attrname == "conflicts":
+ pkg.conflicts.append(line)
+ elif attrname == "provides":
+ pkg.provides.append(line)
+
+def parse_dbs(repos,arch):
+ dbpkgs = {}
+ for repo in repos:
+ pkgs = set([])
+ db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT))
+ for line in db.getnames():
+ if not '/' in line:
+ pkgs.add(line.rsplit('-',2)[0])
+ dbpkgs[repo] = pkgs
+ return(dbpkgs)
+
+def splitdep(dep):
+ name = dep
+ version = ""
+ mod = ""
+ for char in (">=", "<=", "=", ">", "<"):
+ pos = dep.find(char)
+ if pos > -1:
+ name = dep[:pos]
+ version = dep[pos:].replace(char, "")
+ mod = char
+ break
+ return Depend(name,version,mod)
+
+def splitprov(prov):
+ name = prov
+ version = ""
+ pos = prov.find("=")
+ if pos > -1:
+ name = prov[:pos]
+ version = prov[pos:].replace("=", "")
+ return (name,version)
+
+def vercmp(v1,mod,v2):
+ res = alpm.vercmp(v1,v2)
+ if res == 0:
+ return (mod.find("=") > -1)
+ elif res < 0:
+ return (mod.find("<") > -1)
+ elif res > 0:
+ return (mod.find(">") > -1)
+ return False
+
+
+def depcmp(name,version,dep):
+ if name != dep.name:
+ return False
+ if dep.version == "" or dep.mod == "":
+ return True
+ if version == "":
+ return False
+ return vercmp(version,dep.mod,dep.version)
+
+def provcmp(pkg,dep):
+ for prov in pkg.provides:
+ (provname,provver) = splitprov(prov)
+ if depcmp(provname,provver,dep):
+ return True
+ return False
+
+def verify_dep(dep):
+ dep = splitdep(dep)
+ if dep.name in packages:
+ pkg = packages[dep.name]
+ if depcmp(pkg.name,pkg.version,dep):
+ return [pkg]
+ if dep.name in provisions:
+ provlist = provisions[dep.name]
+ results = []
+ for prov in provlist:
+ if provcmp(prov,dep):
+ results.append(prov)
+ return results
+ return []
+
+def verify_deps(name,repo,deps):
+ pkg_deps = []
+ missdeps = []
+ hierarchy = []
+ for dep in deps:
+ pkglist = verify_dep(dep)
+ if pkglist == []:
+ missdeps.append(repo + "/" + name + " --> '" + dep + "'")
+ else:
+ valid_repos = get_repo_hierarchy(repo)
+ pkgdep = None
+ for pkg in pkglist:
+ if pkg.repo in valid_repos:
+ pkgdep = pkg
+ break
+ if not pkgdep:
+ pkgdep = pkglist[0]
+ hierarchy.append((repo,name,pkgdep))
+
+ pkg_deps.append(pkgdep)
+
+ return (pkg_deps,missdeps,hierarchy)
+
+def compute_deplist(pkg):
+ list = []
+ stack = [pkg]
+ while stack != []:
+ dep = stack.pop()
+ if dep in pkgdeps:
+ for dep2 in pkgdeps[dep]:
+ if dep2 not in list:
+ list.append(dep2)
+ stack.append(dep2)
+ if dep in makepkgdeps:
+ for dep2 in makepkgdeps[dep]:
+ if dep2 not in list:
+ list.append(dep2)
+ stack.append(dep2)
+ return list
+
+def check_hierarchy(deph):
+ hierarchy = []
+ for (repo,name,pkgdep) in deph:
+ deplist = compute_deplist(pkgdep)
+ valid_repos = get_repo_hierarchy(repo)
+ extdeps = []
+ for dep in deplist:
+ if dep.repo not in valid_repos:
+ extdeps.append(dep.name)
+ string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " ("
+ string += "%s extra (make)deps to pull" % len(extdeps)
+ if 0 < len(extdeps) < 10:
+ string += " : " + ' '.join(extdeps)
+ string += ")"
+ hierarchy.append(string)
+ return hierarchy
+
+def get_repo_hierarchy(repo):
+ repo_hierarchy = {'core': ['core'], \
+ 'extra': ['core', 'extra'], \
+ 'community': ['core', 'extra', 'community'], \
+ 'multilib': ['core', 'extra', 'community', 'multilib'] }
+ if repo in repo_hierarchy:
+ return repo_hierarchy[repo]
+ else:
+ return ['core','extra','community']
+
+def verify_archs(name,repo,archs):
+ valid_archs = ['any', 'i686', 'x86_64']
+ invalid_archs = []
+ for arch in archs:
+ if arch not in valid_archs:
+ invalid_archs.append(repo + "/" + name + " --> " + arch)
+ return invalid_archs
+
+def find_scc(packages):
+ # reset all variables
+ global index,S,pkgindex,pkglowlink
+ index = 0
+ S = []
+ pkgindex = {}
+ pkglowlink = {}
+ cycles = []
+ for pkg in packages:
+ tarjan(pkg)
+
+def tarjan(pkg):
+ global index,S,pkgindex,pkglowlink,cycles
+ pkgindex[pkg] = index
+ pkglowlink[pkg] = index
+ index += 1
+ checked_deps.append(pkg)
+ S.append(pkg)
+ deps = []
+ if pkg in pkgdeps:
+ deps = pkgdeps[pkg]
+ for dep in deps:
+ if dep not in pkgindex:
+ tarjan(dep)
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep])
+ elif dep in S:
+ pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep])
+ if pkglowlink[pkg] == pkgindex[pkg]:
+ dep = S.pop()
+ if pkg == dep:
+ return
+ path = pkg.name
+ while pkg != dep:
+ path = dep.repo + "/" + dep.name + ">" + path
+ dep = S.pop()
+ path = dep.name + ">" + path
+ if pkg.repo in repos:
+ circular_deps.append(path)
+
+def print_heading(heading):
+ print ""
+ print "=" * (len(heading) + 4)
+ print "= " + heading + " ="
+ print "=" * (len(heading) + 4)
+
+def print_subheading(subheading):
+ print ""
+ print subheading
+ print "-" * (len(subheading) + 2)
+
+def print_missdeps(pkgname,missdeps) :
+ for d in missdeps:
+ print pkgname + " : " + d
+
+def print_result(list, subheading):
+ if len(list) > 0:
+ list.sort()
+ print_subheading(subheading)
+ for item in list:
+ print item
+
+def print_results():
+ print_result(missing_pkgbuilds, "Missing PKGBUILDs")
+ print_result(invalid_pkgbuilds, "Invalid PKGBUILDs")
+ print_result(mismatches, "Mismatched Pkgnames")
+ print_result(dups, "Duplicate PKGBUILDs")
+ print_result(invalid_archs, "Invalid Archs")
+ print_result(missing_deps, "Missing Dependencies")
+ print_result(missing_makedeps, "Missing Makedepends")
+ print_result(dep_hierarchy, "Repo Hierarchy for Dependencies")
+ print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends")
+ print_result(circular_deps, "Circular Dependencies")
+ print_result(dbonly, "Packages found in db, but not in tree")
+ print_result(absonly,"Packages found in tree, but not in db")
+ print_subheading("Summary")
+ print "Missing PKGBUILDs: ", len(missing_pkgbuilds)
+ print "Invalid PKGBUILDs: ", len(invalid_pkgbuilds)
+ print "Mismatching PKGBUILD names: ", len(mismatches)
+ print "Duplicate PKGBUILDs: ", len(dups)
+ print "Invalid archs: ", len(invalid_archs)
+ print "Missing (make)dependencies: ", len(missing_deps)+len(missing_makedeps)
+ print "Repo hierarchy problems: ", len(dep_hierarchy)+len(makedep_hierarchy)
+ print "Circular dependencies: ", len(circular_deps)
+ print "In db, but not in tree: ", len(dbonly)
+ print "In tree, but not in db: ", len(absonly)
+ print ""
+
+def print_usage():
+ print ""
+ print "Usage: ./check_packages.py [OPTION]"
+ print ""
+ print "Options:"
+ print " --abs-tree=<path[,path]> Check the specified tree(s) (default : /var/abs)"
+ print " --repos=<r1,r2,...> Check the specified repos (default : core,extra)"
+ print " --arch=<i686|x86_64> Check the specified arch (default : i686)"
+ print " --repo-dir=<path> Check the dbs at the specified path (default : /srv/ftp)"
+ print " -h, --help Show this help and exit"
+ print ""
+ print "Examples:"
+ print "\n Check core and extra in existing abs tree:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686"
+ print "\n Check community:"
+ print " ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686"
+ print ""
+
+## Default path to the abs root directory
+absroots = ["/var/abs"]
+## Default list of repos to check
+repos = ['core', 'extra']
+## Default arch
+arch = "i686"
+## Default repodir
+repodir = "/srv/ftp"
+
+try:
+ opts, args = getopt.getopt(sys.argv[1:], "", ["abs-tree=", "repos=",
+ "arch=", "repo-dir="])
+except getopt.GetoptError:
+ print_usage()
+ sys.exit()
+if opts != []:
+ for o, a in opts:
+ if o in ("--abs-tree"):
+ absroots = a.split(',')
+ elif o in ("--repos"):
+ repos = a.split(",")
+ elif o in ("--arch"):
+ arch = a
+ elif o in ("--repo-dir"):
+ repodir = a
+ else:
+ print_usage()
+ sys.exit()
+ if args != []:
+ print_usage()
+ sys.exit()
+
+for absroot in absroots:
+ if not os.path.isdir(absroot):
+ print "Error : the abs tree " + absroot + " does not exist"
+ sys.exit()
+ for repo in repos:
+ repopath = absroot + "/" + repo
+ if not os.path.isdir(repopath):
+ print("Warning : the repository " + repo + " does not exist in " + absroot)
+
+if not os.path.isdir(repodir):
+ print "Error: the repository directory %s does not exist" % repodir
+ sys.exit()
+for repo in repos:
+ path = os.path.join(repodir,repo,'os',arch,repo + DBEXT)
+ if not os.path.isfile(path):
+ print "Error : repo DB %s : File not found" % path
+ sys.exit()
+ if not tarfile.is_tarfile(path):
+ print "Error : Cant open repo DB %s, not a valid tar file" % path
+ sys.exit()
+# repos which need to be loaded
+loadrepos = set([])
+for repo in repos:
+ loadrepos = loadrepos | set(get_repo_hierarchy(repo))
+
+print_heading("Integrity Check " + arch + " of " + ",".join(repos))
+print("\nPerforming integrity checks...")
+
+print("==> parsing pkgbuilds")
+parse_pkgbuilds(loadrepos,arch)
+
+# fill provisions
+for name,pkg in packages.iteritems():
+ for prov in pkg.provides:
+ provname=prov.split("=")[0]
+ if provname not in provisions:
+ provisions[provname] = []
+ provisions[provname].append(pkg)
+
+# fill repopkgs
+for name,pkg in packages.iteritems():
+ if pkg.repo in repos:
+ repopkgs[name] = pkg
+
+print("==> parsing db files")
+dbpkgs = parse_dbs(repos,arch)
+
+print("==> checking mismatches")
+for name,pkg in repopkgs.iteritems():
+ pkgdirname = pkg.path.split("/")[-1]
+ if name != pkgdirname and pkg.base != pkgdirname:
+ mismatches.append(name + " vs. " + pkg.path)
+
+print("==> checking archs")
+for name,pkg in repopkgs.iteritems():
+ archs = verify_archs(name,pkg.repo,pkg.archs)
+ invalid_archs.extend(archs)
+
+deph,makedeph = [],[]
+
+print("==> checking dependencies")
+for name,pkg in repopkgs.iteritems():
+ (deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+ missing_deps.extend(missdeps)
+ deph.extend(hierarchy)
+
+print("==> checking makedepends")
+for name,pkg in repopkgs.iteritems():
+ (makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps)
+ makepkgdeps[pkg] = makedeps
+ missing_makedeps.extend(missdeps)
+ makedeph.extend(hierarchy)
+
+print("==> checking hierarchy")
+dep_hierarchy = check_hierarchy(deph)
+makedep_hierarchy = check_hierarchy(makedeph)
+
+print("==> checking for circular dependencies")
+# make sure pkgdeps is filled for every package
+for name,pkg in packages.iteritems():
+ if pkg not in pkgdeps:
+ (deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps)
+ pkgdeps[pkg] = deps
+find_scc(repopkgs.values())
+
+print("==> checking for differences between db files and pkgbuilds")
+for repo in repos:
+ for pkg in dbpkgs[repo]:
+ if not (pkg in repopkgs and repopkgs[pkg].repo == repo):
+ dbonly.append("%s/%s" % (repo,pkg))
+for name,pkg in repopkgs.iteritems():
+ if not name in dbpkgs[pkg.repo]:
+ absonly.append("%s/%s" % (pkg.repo,name))
+
+print_results()
diff --git a/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/cron-jobs/check_archlinux/parse_pkgbuilds.sh
new file mode 100755
index 0000000..5cd17e4
--- /dev/null
+++ b/cron-jobs/check_archlinux/parse_pkgbuilds.sh
@@ -0,0 +1,149 @@
+#!/bin/bash
+
+# Usage : parse_pkgbuilds.sh arch <pkgbuilds_dir1,dir2,...>
+# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra
+
+exit() { return; }
+
+splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts')
+variables=('pkgname' 'pkgbase' 'pkgver' 'pkgrel' 'makedepends' 'arch' ${splitpkg_overrides[@]})
+readonly -a variables splitpkg_overrides
+
+backup_package_variables() {
+ for var in ${splitpkg_overrides[@]}; do
+ indirect="${var}_backup"
+ eval "${indirect}=(\${$var[@]})"
+ done
+}
+
+restore_package_variables() {
+ for var in ${splitpkg_overrides[@]}; do
+ indirect="${var}_backup"
+ if [ -n "${!indirect}" ]; then
+ eval "${var}=(\${$indirect[@]})"
+ else
+ unset ${var}
+ fi
+ done
+}
+
+print_info() {
+ echo -e "%NAME%\n$pkgname\n"
+ echo -e "%VERSION%\n$pkgver-$pkgrel\n"
+ echo -e "%PATH%\n$dir\n"
+
+ if [ -n "$pkgbase" ]; then
+ echo -e "%BASE%\n$pkgbase\n"
+ fi
+
+ if [ -n "$arch" ]; then
+ echo "%ARCH%"
+ for i in ${arch[@]}; do echo $i; done
+ echo ""
+ fi
+ if [ -n "$depends" ]; then
+ echo "%DEPENDS%"
+ for i in ${depends[@]}; do
+ echo $i
+ done
+ echo ""
+ fi
+ if [ -n "$makedepends" ]; then
+ echo "%MAKEDEPENDS%"
+ for i in ${makedepends[@]}; do
+ echo $i
+ done
+ echo ""
+ fi
+ if [ -n "$conflicts" ]; then
+ echo "%CONFLICTS%"
+ for i in ${conflicts[@]}; do echo $i; done
+ echo ""
+ fi
+ if [ -n "$provides" ]; then
+ echo "%PROVIDES%"
+ for i in ${provides[@]}; do echo $i; done
+ echo ""
+ fi
+}
+
+source_pkgbuild() {
+ ret=0
+ dir=$1
+ pkgbuild=$dir/PKGBUILD
+ for var in ${variables[@]}; do
+ unset ${var}
+ done
+ source $pkgbuild &>/dev/null || ret=$?
+
+ # ensure $pkgname and $pkgver variables were found
+ if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ fi
+
+ if [ "${#pkgname[@]}" -gt "1" ]; then
+ pkgbase=${pkgbase:-${pkgname[0]}}
+ for pkg in ${pkgname[@]}; do
+ if [ "$(type -t package_${pkg})" != "function" ]; then
+ echo -e "%INVALID%\n$pkgbuild\n"
+ return 1
+ else
+ backup_package_variables
+ pkgname=$pkg
+ while IFS= read -r line; do
+ var=${line%%=*}
+ var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters
+ for realvar in ${variables[@]}; do
+ if [ "$var" == "$realvar" ]; then
+ eval $line
+ break
+ fi
+ done
+ done < <(type package_${pkg})
+ print_info
+ restore_package_variables
+ fi
+ done
+ else
+ echo
+ print_info
+ fi
+
+ return 0
+}
+
+find_pkgbuilds() {
+ #Skip over some dirs
+ local d="${1##*/}"
+ if [ "$d" = "CVS" -o "$d" = ".svn" ]; then
+ return
+ fi
+
+ if [ -f $1/PKGBUILD ]; then
+ source_pkgbuild $1
+ return
+ fi
+ empty=1
+ for dir in $1/*; do
+ if [ -d $dir ]; then
+ find_pkgbuilds $dir
+ unset empty
+ fi
+ done
+ if [ -n "$empty" ]; then
+ echo -e "%MISSING%\n$1\n"
+ fi
+}
+
+if [ -z "$1" -o -z "$2" ]; then
+ exit 1
+fi
+
+CARCH=$1
+shift
+for dir in "$@"; do
+ find_pkgbuilds $dir
+done
+
+exit 0
diff --git a/cron-jobs/check_archlinux/setup.py b/cron-jobs/check_archlinux/setup.py
new file mode 100644
index 0000000..b172752
--- /dev/null
+++ b/cron-jobs/check_archlinux/setup.py
@@ -0,0 +1,10 @@
+from distutils.core import setup, Extension
+
+alpm = Extension('alpm',
+ libraries = ['alpm'],
+ sources = ['alpm.c'])
+
+setup (name = 'Alpm',
+ version = '1.0',
+ description = 'Alpm bindings',
+ ext_modules = [alpm])
diff --git a/cron-jobs/create-filelists b/cron-jobs/create-filelists
new file mode 100755
index 0000000..8bcfc0b
--- /dev/null
+++ b/cron-jobs/create-filelists
@@ -0,0 +1,101 @@
+#!/bin/bash
+
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
+
+script_lock
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ done
+done
+
+#adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repodb="${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}"
+ filedb="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
+
+ if [ ! -f "${repodb}" ]; then
+ continue
+ fi
+ # get a list of package files defined in the repo db
+ mkdir -p "${WORKDIR}/db-dir-${repo}-${arch}"
+ bsdtar -xf "${repodb}" -C "${WORKDIR}/db-dir-${repo}-${arch}"
+		# This should actually be faster than reading all the just extracted files
+ bsdtar -xOf "${repodb}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}"
+
+ # get a list of package files defined in the files db
+ mkdir -p "${WORKDIR}/files-current-dir-${repo}-${arch}"
+ if [ ! -f "${filedb}" ]; then
+ touch "${WORKDIR}/files-${repo}-${arch}"
+ else
+ bsdtar -xf "${filedb}" -C "${WORKDIR}/files-current-dir-${repo}-${arch}"
+ bsdtar -xOf "${filedb}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/files-${repo}-${arch}"
+ fi
+ done
+done
+
+case "${FILESEXT}" in
+ *.gz) TAR_OPT="z" ;;
+ *.bz2) TAR_OPT="j" ;;
+ *.xz) TAR_OPT="J" ;;
+ *) die "Unknown compression type for FILESEXT=${FILESEXT}" ;;
+esac
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ filedb="${FTP_BASE}/${repo}/os/${arch}/${repo}${FILESEXT}"
+
+ if [ ! -f "${WORKDIR}/db-${repo}-${arch}" ]; then
+ # remove any files db that might be in this empty repo
+ if [ -f "${filedb}" ]; then
+ rm -f "${filedb}"
+ fi
+ continue
+ fi
+
+ # Check if updating the files db is needed
+ if ! diff -q "${WORKDIR}/db-${repo}-${arch}" "${WORKDIR}/files-${repo}-${arch}" >/dev/null; then
+ mkdir -p "${WORKDIR}/files-new-dir-${repo}-${arch}"
+
+ # Include all unchanged file lists
+ # Note: deleted packages are implicitly excluded
+ for f in $(comm -12 "${WORKDIR}/db-${repo}-${arch}" "${WORKDIR}/files-${repo}-${arch}"); do
+ mv "${WORKDIR}/files-current-dir-${repo}-${arch}/${f%*-*${PKGEXT}}" \
+ "${WORKDIR}/files-new-dir-${repo}-${arch}"
+ done
+
+ # Create file lists for new packages
+ for f in $(comm -23 "${WORKDIR}/db-${repo}-${arch}" "${WORKDIR}/files-${repo}-${arch}"); do
+ tdir="${WORKDIR}/files-new-dir-${repo}-${arch}/${f%*-*${PKGEXT}}"
+ mkdir "${tdir}"
+ echo '%FILES%' > "${tdir}/files"
+ bsdtar --exclude=.* -tf "${FTP_BASE}/${repo}/os/${arch}/${f}" >> "${tdir}/files"
+
+ # add desc and depends file from db
+ dbdir="${WORKDIR}/db-dir-${repo}-${arch}/${f%*-*${PKGEXT}}"
+ mv "${dbdir}/desc" "${tdir}/desc"
+ mv "${dbdir}/depends" "${tdir}/depends"
+ done
+
+ # Create the actual file db
+ pushd "${WORKDIR}/files-new-dir-${repo}-${arch}" >/dev/null
+ bsdtar -c${TAR_OPT}f "${WORKDIR}/${arch}-${repo}${FILESEXT}" *
+ popd >/dev/null
+ mv -f "${WORKDIR}/${arch}-${repo}${FILESEXT}" "${filedb}"
+ ln -sf "${repo}${FILESEXT}" "${filedb%.tar.*}"
+ fi
+ done
+done
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+script_unlock
diff --git a/cron-jobs/devlist-mailer b/cron-jobs/devlist-mailer
new file mode 100755
index 0000000..ca2e46b
--- /dev/null
+++ b/cron-jobs/devlist-mailer
@@ -0,0 +1,28 @@
+#!/bin/bash
+#Dummy helper to send email to arch-dev
+# It does nothing if no output
+
+LIST="arch-dev-public@archlinux.org"
+#LIST="aaronmgriffin@gmail.com"
+FROM="repomaint@archlinux.org"
+
+SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")"
+if [ $# -ge 1 ]; then
+ SUBJECT="$1 $(date +"%d-%m-%Y")"
+fi
+
+if [ $# -ge 2 ]; then
+ LIST="$2"
+fi
+
+stdin="$(cat)"
+#echo used to strip whitespace for checking for actual data
+if [ -n "$(echo $stdin)" ]; then
+
+echo "Subject: $SUBJECT
+To: $LIST
+From: $FROM
+
+$stdin" | /usr/sbin/sendmail -F$FROM "$LIST"
+
+fi
diff --git a/cron-jobs/ftpdir-cleanup b/cron-jobs/ftpdir-cleanup
new file mode 100755
index 0000000..09e8a49
--- /dev/null
+++ b/cron-jobs/ftpdir-cleanup
@@ -0,0 +1,113 @@
+#!/bin/bash
+
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
+
+clean_pkg() {
+ local pkg
+ local target
+
+ if ! ${CLEANUP_DRYRUN}; then
+ for pkg in "$@"; do
+ if [ -h "$pkg" ]; then
+ rm -f "$pkg"
+ else
+ mv -f "$pkg" "$CLEANUP_DESTDIR"
+ touch "${CLEANUP_DESTDIR}/$(basename ${pkg})"
+ fi
+ done
+ fi
+}
+
+script_lock
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ done
+done
+
+${CLEANUP_DRYRUN} && warning 'dry run mode is active'
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ # get a list of actual available package files
+ find "${FTP_BASE}/${repo}/os/${arch}" -xtype f -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/repo-${repo}-${arch}"
+ # get a list of package files defined in the repo db
+ bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}"
+
+ missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#missing_pkgs[@]} -ge 1 ]; then
+ error "Missing packages in [${repo}] (${arch})..."
+ for missing_pkg in ${missing_pkgs[@]}; do
+ msg2 "${missing_pkg}"
+ done
+ fi
+
+ old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}"))
+ if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from [${repo}] (${arch})..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}"
+ done
+ fi
+ done
+done
+
+# get a list of all available packages in the package pool
+find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool"
+# create a list of packages in our db
+cat "${WORKDIR}/db-"* | sort -u > "${WORKDIR}/db"
+
+old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db"))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from package pool..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}"
+ done
+fi
+
+# cleanup of legacy $repo/os/any directories
+for repo in ${PKGREPOS[@]}; do
+ if [ ! -d "${FTP_BASE}/${repo}/os/any" ]; then
+ continue
+ fi
+ if [ -n "$(find "${FTP_BASE}/${repo}/os/any" -type d -empty)" ]; then
+ msg "Removing empty legacy directory ${repo}/os/any"
+ ${CLEANUP_DRYRUN} || rmdir "${FTP_BASE}/${repo}/os/any"
+ continue
+ fi
+ find "${FTP_BASE}/${repo}/os/any" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/any-${repo}"
+ cat "${WORKDIR}/db-${repo}-"* | sort -u > "${WORKDIR}/all-${repo}"
+
+ old_pkgs=($(comm -23 "${WORKDIR}/any-${repo}" "${WORKDIR}/all-${repo}"))
+ if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from [${repo}] (any)..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ clean_pkg "${FTP_BASE}/${repo}/os/any/${old_pkg}"
+ done
+ fi
+done
+
+old_pkgs=($(find ${CLEANUP_DESTDIR} -type f -name "*${PKGEXT}" -mtime +${CLEANUP_KEEP} -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old packages from the cleanup directory..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ ${CLEANUP_DRYRUN} || rm -f "${CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+script_unlock
diff --git a/cron-jobs/integrity-check b/cron-jobs/integrity-check
new file mode 100755
index 0000000..d4f9694
--- /dev/null
+++ b/cron-jobs/integrity-check
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+dirname="$(dirname $0)"
+
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+
+script_lock
+
+if [ $# -ne 1 ]; then
+ die "usage: $(basename $0) <mailto>"
+fi
+mailto=$1
+
+check() {
+ ${dirname}/check_archlinux/check_packages.py \
+ --repos="${repos}" \
+ --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \
+ --repo-dir="${FTP_BASE}" \
+ --arch="${arch}" \
+ 2>&1 | ${dirname}/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}"
+}
+
+repos='core,extra,community'
+arch='i686'
+check
+
+repos='core,extra,community,multilib'
+arch='x86_64'
+check
+
+script_unlock
diff --git a/cron-jobs/sourceballs b/cron-jobs/sourceballs
new file mode 100755
index 0000000..ee074bd
--- /dev/null
+++ b/cron-jobs/sourceballs
@@ -0,0 +1,150 @@
+#!/bin/bash
+
+dirname="$(dirname $(readlink -e $0))"
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ done
+done
+
+#adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a readable file for each repo with the following format
+# <pkgbase|pkgname> <pkgver>-<pkgrel> <arch> <license>[ <license>]
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ # Repo does not exist; skip it
+ if [ ! -f "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then
+ continue
+ fi
+ bsdtar -xOf "${ARCH_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \
+ | awk '/^%NAME%/ { getline b };
+ /^%BASE%/ { getline b };
+ /^%VERSION%/ { getline v };
+ /^%LICENSE%/,/^$/ {
+ if ( !/^%LICENSE%/ ) { l=l" "$0 }
+ };
+ /^%ARCH%/ {
+ getline a;
+ printf "%s %s %s %s\n", b, v, a, l;
+ l="";
+ }'
+ done | sort -u > "${WORKDIR}/db-${repo}"
+done
+
+for repo in ${PKGREPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+# Create a list of all available source package file names
+find "${ARCH_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+# Check for all packages if we need to build a source package
+for repo in ${PKGREPOS[@]}; do
+ newpkgs=()
+ failedpkgs=()
+ while read line; do
+ pkginfo=(${line})
+ pkgbase=${pkginfo[0]}
+ pkgver=${pkginfo[1]}
+ pkgarch=${pkginfo[2]}
+ pkglicense=(${pkginfo[@]:3})
+
+		# Should this package be skipped?
+ if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then
+ continue
+ fi
+ # Commenting out, we'll sourceball everything
+ # Check if the license or .force file does not enforce creating a source package
+# if ! (chk_license ${pkglicense[@]} || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then
+# continue
+# fi
+ # Store the expected file name of the source package
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
+
+		# Build the source package if it's not already there
+ if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then
+ # Check if we had failed before
+ if in_array "${pkgbase}-${pkgver}${SRCEXT}" ${failedpkgs[@]}; then
+ continue
+ fi
+
+ # Get the sources from svn
+ mkdir -p "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}"
+ #svn export -q "${SVNREPO}/${pkgbase}/repos/${repo}-${pkgarch}" \
+ # "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1
+
+			# Try the official repo first, then fall back to [libre], then [libre-testing]
+ cp -r "${SVNREPO}/$repo/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1 || \
+ cp -r "${SVNREPO}/libre-testing/${pkgbase}" "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/" >/dev/null 2>&1
+ if [ $? -ge 1 ]; then
+ failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ continue
+ fi
+
+ # Build the actual source package
+ pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null
+ makepkg --nocolor --allsource --ignorearch # >/dev/null 2>&1
+ if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then
+ mv "${pkgbase}-${pkgver}${SRCEXT}" "${ARCH_BASE}/${SRCPOOL}"
+ # Avoid creating the same source package for every arch
+ echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs"
+ newpkgs[${#newpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ else
+ failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}${SRCEXT}"
+ fi
+ popd >/dev/null
+ fi
+ done < "${WORKDIR}/db-${repo}"
+
+ if [ ${#newpkgs[@]} -ge 1 ]; then
+ msg "Adding source packages for [${repo}]..."
+ for new_pkg in ${newpkgs[@]}; do
+ msg2 "${new_pkg}"
+ done
+ fi
+ if [ ${#failedpkgs[@]} -ge 1 ]; then
+ msg "Failed to create source packages for [${repo}]..."
+ for failed_pkg in ${failedpkgs[@]}; do
+ msg2 "${failed_pkg}"
+ done
+ fi
+done
+
+# Cleanup old source packages
+cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ ${SOURCE_CLEANUP_DRYRUN} && warning 'dry run mode is active'
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ if ! ${SOURCE_CLEANUP_DRYRUN}; then
+ mv "$ARCH_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find ${SOURCE_CLEANUP_DESTDIR} -type f -name "*${SRCEXT}" -mtime +${SOURCE_CLEANUP_KEEP} -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ ${SOURCE_CLEANUP_DRYRUN} || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+script_unlock
diff --git a/cron-jobs/sourceballs.force b/cron-jobs/sourceballs.force
new file mode 100644
index 0000000..badf15d
--- /dev/null
+++ b/cron-jobs/sourceballs.force
@@ -0,0 +1,4 @@
+faad2
+wxgtk
+wxpython
+glhack
diff --git a/cron-jobs/sourceballs.skip b/cron-jobs/sourceballs.skip
new file mode 100644
index 0000000..14d6f4b
--- /dev/null
+++ b/cron-jobs/sourceballs.skip
@@ -0,0 +1,29 @@
+nexuiz-data
+torcs-data
+tremulous-data
+ufoai-data
+frogatto-data
+vdrift-data
+naev-data
+btanks-data
+wesnoth-data
+texlive-bin
+texlive-bibtexextra
+texlive-core
+texlive-fontsextra
+texlive-formatsextra
+texlive-games
+texlive-genericextra
+texlive-htmlxml
+texlive-humanities
+texlive-langcjk
+texlive-langcyrillic
+texlive-langextra
+texlive-langgreek
+texlive-latexextra
+texlive-music
+texlive-pictures
+texlive-plainextra
+texlive-pstricks
+texlive-publishers
+texlive-science
diff --git a/cron-jobs/sourceballs2 b/cron-jobs/sourceballs2
new file mode 100755
index 0000000..452208e
--- /dev/null
+++ b/cron-jobs/sourceballs2
@@ -0,0 +1,87 @@
+#!/bin/bash
+
+dirname="$(dirname $(readlink -e $0))"
+. "${dirname}/../db-functions"
+. "${dirname}/../config"
+. "${MAKEPKGCONF}"
+
+pushd "${WORKDIR}" >/dev/null
+
+script_lock
+
+#adjust the nice level to run at a lower priority
+renice +10 -p $$ > /dev/null
+
+# Create a list of all available source package file names
+find "${ARCH_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs"
+
+# Steps
+# Traverse the ABSLibre
+# Makepkg --allsource every package
+# Remove the old packages
+pushd "${SVNREPO}" >/dev/null
+
+failedpkgs=()
+for repo in ${PKGREPOS[@]}; do
+ pushd $repo >/dev/null
+ find . -maxdepth 1 -type d | while read pkg; do
+ pushd "${SVNREPO}/$repo/$pkg" >/dev/null
+
+ [[ ! -e PKGBUILD ]] && {
+ warning "$repo/$pkg is not a package"
+ continue
+ }
+
+ unset pkgbase pkgname
+ source PKGBUILD
+ pkgbase=${pkgbase:-$pkgname}
+
+ echo "${pkgbase}-${pkgver}-${pkgrel}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs"
+
+ # Skip already sourceballed
+ [[ -e "${SRCPKGDEST}/${pkgbase}-${pkgver}-${pkgrel}${SRCEXT}" ]] && \
+ continue
+
+ makepkg --allsource --ignorearch -c >/dev/null 2>&1
+
+ [[ $? -ne 0 ]] && \
+ failedpkgs[${#failedpkgs[*]}]="${pkgbase}-${pkgver}-${pkgrel}${SRCEXT}"
+
+ done
+ popd >/dev/null
+done
+
+# Cleanup old source packages
+cat "${WORKDIR}/expected-src-pkgs" | sort -u > "${WORKDIR}/expected-src-pkgs.sort"
+cat "${WORKDIR}/available-src-pkgs" | sort -u > "${WORKDIR}/available-src-pkgs.sort"
+old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort"))
+
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages..."
+ ${SOURCE_CLEANUP_DRYRUN} && warning 'dry run mode is active'
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ if ! ${SOURCE_CLEANUP_DRYRUN}; then
+ mv "$ARCH_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}"
+ touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ fi
+ done
+fi
+
+old_pkgs=($(find ${SOURCE_CLEANUP_DESTDIR} -type f -name "*${SRCEXT}" -mtime +${SOURCE_CLEANUP_KEEP} -printf '%f\n'))
+if [ ${#old_pkgs[@]} -ge 1 ]; then
+ msg "Removing old source packages from the cleanup directory..."
+ for old_pkg in ${old_pkgs[@]}; do
+ msg2 "${old_pkg}"
+ ${SOURCE_CLEANUP_DRYRUN} || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}"
+ done
+fi
+
+msg "Failed"
+for _fail in ${failedpkgs[@]}; do
+ msg2 "$_fail"
+done
+
+
+script_unlock
+
diff --git a/cron-jobs/update-web-db b/cron-jobs/update-web-db
new file mode 100755
index 0000000..6ced4c1
--- /dev/null
+++ b/cron-jobs/update-web-db
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+. "$(dirname $0)/../db-functions"
+. "$(dirname $0)/../config"
+
+# setup paths
+SPATH="/srv/http/archweb"
+ENVPATH="/srv/http/archweb-env/bin/activate"
+
+# having "more important repos" last should make [core] trickle to the top of
+# the updates list each hour rather than being overwhelmed by big [extra] and
+# [community] updates
+REPOS=('community-testing' 'multilib-testing' 'multilib' 'community' 'extra' 'testing' 'core')
+LOGOUT="/tmp/archweb_update.log"
+
+# figure out what operation to perform
+cmd="$(basename $0)"
+if [[ $cmd != "update-web-db" && $cmd != "update-web-files-db" ]]; then
+ die "Invalid command name '$cmd' specified!"
+fi
+
+script_lock
+
+# run at nice 5. it can churn quite a bit of cpu after all.
+renice +5 -p $$ > /dev/null
+
+echo "$cmd: Updating DB at $(date)" >> "${LOGOUT}"
+
+# source our virtualenv if it exists
+if [ -f "$ENVPATH" ]; then
+ . "$ENVPATH"
+fi
+
+case "$cmd" in
+ update-web-db)
+ dbfileext="${DBEXT}"
+ flags=""
+ ;;
+ update-web-files-db)
+ dbfileext="${FILESEXT}"
+ flags="--filesonly"
+ ;;
+esac
+
+# Lock the repos and get a copy of the db files to work on
+for repo in ${REPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${arch} || exit 1
+ dbfile="/srv/ftp/${repo}/os/${arch}/${repo}${dbfileext}"
+ if [ -f "${dbfile}" ]; then
+ mkdir -p "${WORKDIR}/${repo}/${arch}"
+ cp "${dbfile}" "${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+ fi
+ repo_unlock ${repo} ${arch}
+ done
+done
+
+# Run reporead on our db copy
+pushd $SPATH >/dev/null
+for repo in ${REPOS[@]}; do
+ for arch in ${ARCHES[@]}; do
+ dbcopy="${WORKDIR}/${repo}/${arch}/${repo}${dbfileext}"
+ if [ -f "${dbcopy}" ]; then
+ echo "Updating ${repo}-${arch}" >> "${LOGOUT}"
+ ./manage.py reporead ${flags} ${arch} "${dbcopy}" >> "${LOGOUT}" 2>&1
+ echo "" >> "${LOGOUT}"
+ fi
+ done
+done
+popd >/dev/null
+echo "" >> "${LOGOUT}"
+
+# rotate the file if it is getting big (> 10M), overwriting any old backup
+if [[ $(stat -c%s "${LOGOUT}") -gt 10485760 ]]; then
+ mv "${LOGOUT}" "${LOGOUT}.old"
+fi
+
+script_unlock
diff --git a/cron-jobs/update-web-files-db b/cron-jobs/update-web-files-db
new file mode 120000
index 0000000..0c2c4fa
--- /dev/null
+++ b/cron-jobs/update-web-files-db
@@ -0,0 +1 @@
+update-web-db \ No newline at end of file
diff --git a/db-move b/db-move
new file mode 100755
index 0000000..ae21781
--- /dev/null
+++ b/db-move
@@ -0,0 +1,117 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -lt 3 ]; then
+ msg "usage: $(basename $0) <repo-from> <repo-to> <pkgname|pkgbase> ..."
+ exit 1
+fi
+
+args=(${@})
+repo_from="${args[0]}"
+repo_to="${args[1]}"
+ftppath_from="${FTP_BASE}/${repo_from}/os/"
+ftppath_to="${FTP_BASE}/${repo_to}/os/"
+
+if ! check_repo_permission $repo_to || ! check_repo_permission $repo_from; then
+ die "You don't have permission to move packages from ${repo_from} to ${repo_to}"
+fi
+
+# TODO: this might lock too much (architectures)
+for pkgarch in ${ARCHES[@]}; do
+ repo_lock ${repo_to} ${pkgarch} || exit 1
+ repo_lock ${repo_from} ${pkgarch} || exit 1
+done
+
+# check if packages to be moved exist in svn and ftp dir
+/usr/bin/svn checkout -q -N "${SVNREPO}" "${WORKDIR}/svn" >/dev/null
+for pkgbase in ${args[@]:2}; do
+ /usr/bin/svn up -q "${WORKDIR}/svn/${pkgbase}" >/dev/null
+ for pkgarch in ${ARCHES[@]} 'any'; do
+ svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
+ if [ -r "${svnrepo_from}/PKGBUILD" ]; then
+ pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo ${pkgname[@]}))
+ if [ ${#pkgnames[@]} -lt 1 ]; then
+ die "Could not read pkgname"
+ fi
+
+ pkgver=$(. "${svnrepo_from}/PKGBUILD"; echo "${pkgver}-${pkgrel}")
+ if [ -z "${pkgver}" ]; then
+ die "Could not read pkgver"
+ fi
+
+ if [ "${pkgarch}" == 'any' ]; then
+ tarches=(${ARCHES[@]})
+ else
+ tarches=("${pkgarch}")
+ fi
+
+ for pkgname in ${pkgnames[@]}; do
+ for tarch in ${tarches[@]}; do
+ getpkgfile "${ftppath_from}/${tarch}/"${pkgname}-${pkgver}-${pkgarch}${PKGEXT} >/dev/null
+ done
+ done
+ continue 2
+ fi
+ done
+ die "${pkgbase} not found in ${repo_from}"
+done
+
+msg "Moving packages from [${repo_from}] to [${repo_to}]..."
+
+declare -A add_pkgs
+declare -A remove_pkgs
+for pkgbase in ${args[@]:2}; do
+ for pkgarch in ${ARCHES[@]} 'any'; do
+ svnrepo_from="${WORKDIR}/svn/${pkgbase}/repos/${repo_from}-${pkgarch}"
+ svnrepo_to="${WORKDIR}/svn/${pkgbase}/repos/${repo_to}-${pkgarch}"
+
+ if [ -f "${svnrepo_from}/PKGBUILD" ]; then
+ if [ "${pkgarch}" == 'any' ]; then
+ tarches=(${ARCHES[@]})
+ else
+ tarches=("${pkgarch}")
+ fi
+ msg2 "${pkgbase} ($(echo ${tarches[@]}))"
+ pkgnames=($(. "${svnrepo_from}/PKGBUILD"; echo ${pkgname[@]}))
+ pkgver=$(. "${svnrepo_from}/PKGBUILD"; echo "${pkgver}-${pkgrel}")
+
+ if [ -d "${svnrepo_to}" ]; then
+ /usr/bin/svn rm --force -q "${svnrepo_to}"
+ /usr/bin/svn commit -q "${WORKDIR}/svn/${pkgbase}" -m "$(basename $0): ${pkgbase} removed by $(id -un) for move to [${repo_to}] (${pkgarch})"
+ fi
+
+ /usr/bin/svn mv -q -r HEAD "${svnrepo_from}" "${svnrepo_to}"
+ /usr/bin/svn commit -q "${WORKDIR}/svn/${pkgbase}" -m "$(basename $0): moved ${pkgbase} from [${repo_from}] to [${repo_to}] (${pkgarch})"
+
+ for pkgname in ${pkgnames[@]}; do
+ for tarch in ${tarches[@]}; do
+ pkgpath=$(getpkgfile "${ftppath_from}/${tarch}/"${pkgname}-${pkgver}-${pkgarch}${PKGEXT})
+ pkgfile=$(basename "${pkgpath}")
+
+ # copy package to pool if needed
+ # TODO: can be removed once every package has been moved to the package pool
+ if [ ! -f ${FTP_BASE}/${PKGPOOL}/${pkgfile} ]; then
+ cp ${pkgpath} ${FTP_BASE}/${PKGPOOL}
+ fi
+ ln -s "../../../${PKGPOOL}/${pkgfile}" ${ftppath_to}/${tarch}/
+ add_pkgs[${tarch}]+="${FTP_BASE}/${PKGPOOL}/${pkgfile} "
+ remove_pkgs[${tarch}]+="${pkgname} "
+ done
+ done
+ fi
+ done
+done
+
+for tarch in ${ARCHES[@]}; do
+ if [ -n "${add_pkgs[${tarch}]}" ]; then
+ arch_repo_add "${repo_to}" "${tarch}" ${add_pkgs[${tarch}]}
+ arch_repo_remove "${repo_from}" "${tarch}" ${remove_pkgs[${tarch}]}
+ fi
+done
+
+for pkgarch in ${ARCHES[@]}; do
+ repo_unlock ${repo_from} ${pkgarch}
+ repo_unlock ${repo_to} ${pkgarch}
+done
diff --git a/db-remove b/db-remove
new file mode 100755
index 0000000..292af5d
--- /dev/null
+++ b/db-remove
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -ne 3 ]; then
+ msg "usage: $(basename $0) <pkgname|pkgbase> <repo> <arch>"
+ exit 1
+fi
+
+pkgbase="$1"
+repo="$2"
+arch="$3"
+
+ftppath="$FTP_BASE/$repo/os"
+svnrepo="$repo-$arch"
+
+if ! check_repo_permission $repo; then
+ die "You don't have permission to remove packages from ${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=(${ARCHES[@]})
+else
+ tarches=("$arch")
+fi
+
+for tarch in ${tarches[@]}; do
+ repo_lock $repo $tarch || exit 1
+done
+
+msg "Removing $pkgbase from [$repo]..."
+/usr/bin/svn checkout -q "${SVNREPO}/${pkgbase}" "${WORKDIR}/svn/${pkgbase}" >/dev/null
+
+if [ -d "${WORKDIR}/svn/$pkgbase/repos/$svnrepo" ]; then
+ pkgnames=($(. "${WORKDIR}/svn/$pkgbase/repos/$svnrepo/PKGBUILD"; echo ${pkgname[@]}))
+ /usr/bin/svn rm --force -q "${WORKDIR}/svn/$pkgbase/repos/$svnrepo"
+ /usr/bin/svn commit -q "${WORKDIR}/svn/$pkgbase" -m "$(basename $0): $pkgbase removed by $(id -un)"
+else
+ warning "$pkgbase not found in $svnrepo"
+fi
+
+for tarch in ${tarches[@]}; do
+ arch_repo_remove "${repo}" "${tarch}" ${pkgnames[@]}
+ repo_unlock $repo $tarch
+done
diff --git a/db-repo-add b/db-repo-add
new file mode 100755
index 0000000..53cfc84
--- /dev/null
+++ b/db-repo-add
@@ -0,0 +1,39 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -ne 3 ]; then
+ msg "usage: $(basename $0) <pkgfile> <repo> <arch>"
+ exit 1
+fi
+
+pkgfile="$1"
+repo="$2"
+arch="$3"
+
+ftppath="$FTP_BASE/$repo/os"
+
+if ! check_repo_permission $repo; then
+ die "You don't have permission to add packages to ${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=(${ARCHES[@]})
+else
+ tarches=("$arch")
+fi
+
+for tarch in ${tarches[@]}; do
+ repo_lock $repo $tarch || exit 1
+done
+
+msg "Adding $pkgfile to [$repo]..."
+
+for tarch in ${tarches[@]}; do
+ if [ ! -f "${pkgfile}" ]; then
+ die "Package file ${pkgfile} not found"
+ fi
+ arch_repo_add "${repo}" "${tarch}" ${pkgfile}
+ repo_unlock $repo $tarch
+done
diff --git a/db-repo-remove b/db-repo-remove
new file mode 100755
index 0000000..b01910f
--- /dev/null
+++ b/db-repo-remove
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -ne 3 ]; then
+ msg "usage: $(basename $0) <pkgname> <repo> <arch>"
+ exit 1
+fi
+
+pkgname="$1"
+repo="$2"
+arch="$3"
+
+ftppath="$FTP_BASE/$repo/os"
+
+if ! check_repo_permission $repo; then
+ die "You don't have permission to remove packages from ${repo}"
+fi
+
+if [ "$arch" == "any" ]; then
+ tarches=(${ARCHES[@]})
+else
+ tarches=("$arch")
+fi
+
+for tarch in ${tarches[@]}; do
+ repo_lock $repo $tarch || exit 1
+done
+
+msg "Removing $pkgname from [$repo]..."
+
+for tarch in ${tarches[@]}; do
+ arch_repo_remove "${repo}" "${tarch}" ${pkgname}
+ repo_unlock $repo $tarch
+done
diff --git a/db-update b/db-update
new file mode 100755
index 0000000..4740809
--- /dev/null
+++ b/db-update
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -ge 1 ]; then
+ warning "Calling $(basename $0) with a specific repository is no longer supported"
+ exit 1
+fi
+
+# Find repos with packages to release
+repos=($(find "${STAGING}" -mindepth 1 -type d ! -empty -printf '%f ' 2>/dev/null))
+if [ $? -ge 1 ]; then
+ die "Could not read ${STAGING}"
+fi
+
+# TODO: this might lock too much (architectures)
+for repo in ${repos[@]}; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${pkgarch} || exit 1
+ done
+done
+
+# check if packages are valid
+for repo in ${repos[@]}; do
+ if ! check_repo_permission "${repo}"; then
+ die "You don't have permission to update packages in ${repo}"
+ fi
+ pkgs=($(getpkgfiles "${STAGING}/${repo}/"*${PKGEXT}))
+ if [ $? -eq 0 ]; then
+ for pkg in ${pkgs[@]}; do
+ if [ -h "${pkg}" ]; then
+ die "Package ${repo}/$(basename ${pkg}) is a symbolic link"
+ fi
+ if ! check_pkgfile "${pkg}"; then
+ die "Package ${repo}/$(basename ${pkg}) is not consistent with its meta data"
+ fi
+ #if ! check_pkgrepos "${pkg}"; then
+ # die "Package ${repo}/$(basename ${pkg}) already exists in another repository"
+ #fi
+ done
+ if ! check_splitpkgs ${repo} ${pkgs[@]}; then
+ die "Missing split packages for ${repo}"
+ fi
+ else
+ die "Could not read ${STAGING}"
+ fi
+done
+
+for repo in ${repos[@]}; do
+ msg "Updating [${repo}]..."
+ any_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-any${PKGEXT} 2>/dev/null))
+ for pkgarch in ${ARCHES[@]}; do
+ add_pkgs=()
+ arch_pkgs=($(getpkgfiles "${STAGING}/${repo}/"*-${pkgarch}${PKGEXT} 2>/dev/null))
+ for pkg in ${arch_pkgs[@]} ${any_pkgs[@]}; do
+ pkgfile="$(basename ${pkg})"
+ msg2 "${pkgfile} (${pkgarch})"
+ # any packages might have been moved by the previous run
+ if [ -f "${pkg}" ]; then
+ mv "${pkg}" "$FTP_BASE/${PKGPOOL}"
+ fi
+ ln -s "../../../${PKGPOOL}/${pkgfile}" "$FTP_BASE/$repo/os/${pkgarch}"
+ add_pkgs[${#add_pkgs[*]}]=${pkgfile}
+ done
+ if [ ${#add_pkgs[@]} -ge 1 ]; then
+ arch_repo_add "${repo}" "${pkgarch}" ${add_pkgs[@]}
+ fi
+ done
+done
+
+for repo in ${repos[@]}; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${pkgarch}
+ done
+done
diff --git a/test/lib/common.inc b/test/lib/common.inc
new file mode 100644
index 0000000..208400c
--- /dev/null
+++ b/test/lib/common.inc
@@ -0,0 +1,191 @@
+set -E
+
+. "$(dirname ${BASH_SOURCE[0]})/../../config"
+. "$(dirname ${BASH_SOURCE[0]})/../../db-functions"
+
+oneTimeSetUp() {
+ local p
+ local d
+ local a
+ local pkgname
+ local pkgarch
+ local pkgversion
+ local build
+ pkgdir="$(mktemp -d /dev/shm/$(basename $0).XXXXXXXXXX)"
+ cp -Lr $(dirname ${BASH_SOURCE[0]})/../packages/* "${pkgdir}"
+ msg 'Building packages...'
+ for d in "${pkgdir}"/*; do
+ pushd $d >/dev/null
+ pkgname=($(. PKGBUILD; echo ${pkgname[@]}))
+ pkgarch=($(. PKGBUILD; echo ${arch[@]}))
+ pkgversion=$(. PKGBUILD; echo ${pkgver}-${pkgrel})
+
+ build=true
+ for a in ${pkgarch[@]}; do
+ for p in ${pkgname[@]}; do
+ [ ! -f ${p}-${pkgversion}-${a}${PKGEXT} ] && build=false
+ done
+ done
+
+ if ! ${build}; then
+ if [ "${pkgarch[0]}" == 'any' ]; then
+ extra-x86_64-build || die 'extra-x86_64-build failed'
+ else
+ for a in ${pkgarch[@]}; do
+ extra-${a}-build || die "extra-${a}-build failed"
+ done
+ fi
+ for a in ${pkgarch[@]}; do
+ for p in ${pkgname[@]}; do
+ cp ${p}-${pkgversion}-${a}${PKGEXT} $(dirname ${BASH_SOURCE[0]})/../packages/$(basename ${d})
+ done
+ done
+ fi
+ popd >/dev/null
+ done
+}
+
+oneTimeTearDown() {
+ rm -rf "${pkgdir}"
+}
+
+setUp() {
+ local p
+ local pkg
+ local r
+ local a
+
+ [ -f "$(dirname ${BASH_SOURCE[0]})/../../config.local" ] && die "$(dirname ${BASH_SOURCE[0]})/../../config.local exists"
+ TMP="$(mktemp -d /dev/shm/$(basename $0).XXXXXXXXXX)"
+ #msg "Using ${TMP}"
+
+ PKGREPOS=('core' 'extra' 'testing')
+ PKGPOOL='pool/packages'
+ mkdir -p "${TMP}/"{ftp,tmp,staging,{package,source}-cleanup,svn-packages-{copy,repo}}
+
+ for r in ${PKGREPOS[@]}; do
+ mkdir -p "${TMP}/staging/${r}"
+ for a in ${ARCHES[@]} any; do
+ mkdir -p "${TMP}/ftp/${r}/os/${a}"
+ done
+ done
+ mkdir -p "${TMP}/ftp/${PKGPOOL}"
+ mkdir -p "${TMP}/ftp/${SRCPOOL}"
+
+ msg 'Creating svn repository...'
+ svnadmin create "${TMP}/svn-packages-repo"
+ svn checkout -q "file://${TMP}/svn-packages-repo" "${TMP}/svn-packages-copy"
+
+ for p in "${pkgdir}"/*; do
+ pkg=$(basename $p)
+ mkdir -p "${TMP}/svn-packages-copy/${pkg}"/{trunk,repos}
+ cp "${p}"/* "${TMP}/svn-packages-copy"/${pkg}/trunk/
+ svn add -q "${TMP}/svn-packages-copy"/${pkg}
+ svn commit -q -m"initial commit of ${pkg}" "${TMP}/svn-packages-copy"
+ done
+
+ cat <<eot > "$(dirname ${BASH_SOURCE[0]})/../../config.local"
+ FTP_BASE="${TMP}/ftp"
+ SVNREPO="file://${TMP}/svn-packages-repo"
+ PKGREPOS=(${PKGREPOS[@]})
+ PKGPOOL="${PKGPOOL}"
+ CLEANUP_DESTDIR="${TMP}/package-cleanup"
+ SOURCE_CLEANUP_DESTDIR="${TMP}/source-cleanup"
+ STAGING="${TMP}/staging"
+ TMPDIR="${TMP}/tmp"
+ CLEANUP_DRYRUN=false
+ SOURCE_CLEANUP_DRYRUN=false
+eot
+ . "$(dirname ${BASH_SOURCE[0]})/../../config"
+}
+
+tearDown() {
+ rm -rf "${TMP}"
+ rm -f "$(dirname ${BASH_SOURCE[0]})/../../config.local"
+ echo
+}
+
+releasePackage() {
+ local repo=$1
+ local pkgbase=$2
+ local arch=$3
+
+ pushd "${TMP}/svn-packages-copy"/${pkgbase}/trunk/ >/dev/null
+	archrelease ${repo}-${arch} >/dev/null 2>&1
+ pkgver=$(. PKGBUILD; echo "${pkgver}-${pkgrel}")
+ popd >/dev/null
+ cp "${pkgdir}/${pkgbase}"/*-${pkgver}-${arch}.pkg.tar.* "${STAGING}"/${repo}/
+}
+
+checkAnyPackage() {
+ local repo=$1
+ local pkg=$2
+ local arch
+
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found"
+
+ for arch in i686 x86_64; do
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink"
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
+ done
+ [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir"
+
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkg}) \
+ || fail "${pkg} not in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+
+ [ -r "${FTP_BASE}/${repo}/os/any/${pkg}" ] && fail "${repo}/os/any/${pkg} should not exist"
+}
+
+checkPackage() {
+ local repo=$1
+ local pkg=$2
+ local arch=$3
+
+ [ -r "${FTP_BASE}/${PKGPOOL}/${pkg}" ] || fail "${PKGPOOL}/${pkg} not found"
+ [ -L "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] || fail "${repo}/os/${arch}/${pkg} not a symlink"
+ [ -r "${STAGING}"/${repo}/${pkg} ] && fail "${repo}/${pkg} found in staging dir"
+
+ [ "$(readlink -e "${FTP_BASE}/${repo}/os/${arch}/${pkg}")" == "${FTP_BASE}/${PKGPOOL}/${pkg}" ] \
+ || fail "${repo}/os/${arch}/${pkg} does not link to ${PKGPOOL}/${pkg}"
+
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkg}) \
+ || fail "${pkg} not in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+
+ local pkgbase=$(getpkgbase "${FTP_BASE}/${PKGPOOL}/${pkg}")
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \
+ || fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} does not exist"
+}
+
+checkRemovedPackage() {
+ local repo=$1
+ local pkgbase=$2
+ local arch=$3
+
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-${arch}" ] \
+ && fail "svn-packages-copy/${pkgbase}/repos/${repo}-${arch} should not exist"
+}
+
+checkRemovedAnyPackage() {
+ local repo=$1
+ local pkgbase=$2
+ local arch
+
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+ done
+
+ svn up -q "${TMP}/svn-packages-copy/${pkgbase}"
+ [ -d "${TMP}/svn-packages-copy/${pkgbase}/repos/${repo}-any" ] \
+ && fail "svn-packages-copy/${pkgbase}/repos/${repo}-any should not exist"
+}
diff --git a/test/lib/shunit2 b/test/lib/shunit2
new file mode 100644
index 0000000..d900a70
--- /dev/null
+++ b/test/lib/shunit2
@@ -0,0 +1,1116 @@
+# $Id: shunit2 277 2008-10-29 21:20:22Z kate.ward@forestent.com $
+# vim:et:ft=sh:sts=2:sw=2
+# vim:foldmethod=marker:foldmarker=/**,*/
+#
+#/**
+# <?xml version="1.0" encoding="UTF-8"?>
+# <s:shelldoc xmlns:s="http://www.forestent.com/projects/shelldoc/xsl/2005.0">
+# <s:header>
+# shUnit 2.1.5
+# Shell Unit Test Framework
+#
+# http://shunit2.googlecode.com/
+#
+# written by Kate Ward &lt;kate.ward@forestent.com&gt;
+# released under the LGPL
+#
+# This module implements a xUnit based unit test framework similar to JUnit.
+# </s:header>
+#*/
+
+SHUNIT_VERSION='2.1.5'
+
+# canonical return codes used throughout shUnit2
+SHUNIT_TRUE=0
+SHUNIT_FALSE=1
+SHUNIT_ERROR=2
+
+# logging helpers; all write to stderr with a severity prefix
+_shunit_warn() { echo "shunit2:WARN $@" >&2; }
+_shunit_error() { echo "shunit2:ERROR $@" >&2; }
+_shunit_fatal() { echo "shunit2:FATAL $@" >&2; }
+
+# specific shell checks
+if [ -n "${ZSH_VERSION:-}" ]; then
+  # zsh only splits unquoted words when the shwordsplit option is set;
+  # shUnit2 relies on word splitting, so refuse to run without it
+  setopt |grep "^shwordsplit$" >/dev/null
+  if [ $? -ne ${SHUNIT_TRUE} ]; then
+    _shunit_fatal 'zsh shwordsplit option is required for proper operation'
+    exit ${SHUNIT_ERROR}
+  fi
+  # zsh does not propagate $0; the caller must export SHUNIT_PARENT instead
+  if [ -z "${SHUNIT_PARENT:-}" ]; then
+    _shunit_fatal "zsh does not pass \$0 through properly. please declare \
+\"SHUNIT_PARENT=\$0\" before calling shUnit2"
+    exit ${SHUNIT_ERROR}
+  fi
+fi
+
+#
+# constants
+#
+
+__SHUNIT_ASSERT_MSG_PREFIX='ASSERT:'
+# script whose test* functions will be collected; $0 unless zsh (see above)
+__SHUNIT_PARENT=${SHUNIT_PARENT:-$0}
+
+# set the constants readonly
+shunit_constants_=`set |grep '^__SHUNIT_' |cut -d= -f1`
+# some greps treat the `set` output as binary; retry with grep -a if so
+echo "${shunit_constants_}" |grep '^Binary file' >/dev/null \
+  && shunit_constants_=`set |grep -a '^__SHUNIT_' |cut -d= -f1`
+for shunit_constant_ in ${shunit_constants_}; do
+  shunit_ro_opts_=''
+  case ${ZSH_VERSION:-} in
+    '') ;;  # this isn't zsh
+    [123].*) ;;  # early versions (1.x, 2.x, 3.x)
+    *) shunit_ro_opts_='-g' ;;  # all later versions. declare readonly globally
+  esac
+  readonly ${shunit_ro_opts_} ${shunit_constant_}
+done
+unset shunit_constant_ shunit_constants_ shunit_ro_opts_
+
+# variables
+__shunit_skip=${SHUNIT_FALSE}   # when TRUE, asserts are counted but not run
+__shunit_suite=''               # space-separated list of test function names
+
+# counts of tests
+__shunit_testSuccess=${SHUNIT_TRUE}  # success flag for the currently running test
+__shunit_testsTotal=0
+__shunit_testsPassed=0
+__shunit_testsFailed=0
+
+# counts of asserts
+__shunit_assertsTotal=0
+__shunit_assertsPassed=0
+__shunit_assertsFailed=0
+__shunit_assertsSkipped=0
+
+__shunit_lineno=''                          # "[NNN] " prefix from --lineno, or empty
+__shunit_reportGenerated=${SHUNIT_FALSE}    # guards double-reporting in cleanup
+
+# macros
+# consumes a leading "--lineno NNN" argument pair, stashing it in __shunit_lineno
+_SHUNIT_LINENO_='eval __shunit_lineno=""; if [ "${1:-}" = "--lineno" ]; then [ -n "$2" ] && __shunit_lineno="[$2] "; shift 2; fi'
+
+#-----------------------------------------------------------------------------
+# assert functions
+#
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertEquals</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>expected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that <emphasis>expected</emphasis> and
+# <emphasis>actual</emphasis> are equal to one another. The message is
+# optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Asserts that two strings are equal: assertEquals [message] expected actual.
+assertEquals()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    _shunit_error "assertEquals() requires two or three arguments; $# given"
+    _shunit_error "1: ${1:+$1} 2: ${2:+$2} 3: ${3:+$3}"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  # prepend the captured line number to the optional user message
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  shunit_expected_=$1
+  shunit_actual_=$2
+
+  shunit_return=${SHUNIT_TRUE}
+  if [ "${shunit_expected_}" = "${shunit_actual_}" ]; then
+    _shunit_assertPass
+  else
+    failNotEquals "${shunit_message_}" "${shunit_expected_}" "${shunit_actual_}"
+    shunit_return=${SHUNIT_FALSE}
+  fi
+
+  unset shunit_message_ shunit_expected_ shunit_actual_
+  return ${shunit_return}
+}
+# line-number-capturing wrapper: ${_ASSERT_EQUALS_} '"msg"' expected actual
+_ASSERT_EQUALS_='eval assertEquals --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertNotEquals</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>unexpected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that <emphasis>unexpected</emphasis> and
+# <emphasis>actual</emphasis> are <emphasis role="strong">not</emphasis>
+# equal to one another. The message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Asserts that two strings differ: assertNotEquals [message] unexpected actual.
+assertNotEquals()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    _shunit_error "assertNotEquals() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  shunit_unexpected_=$1
+  shunit_actual_=$2
+
+  shunit_return=${SHUNIT_TRUE}
+  if [ "${shunit_unexpected_}" != "${shunit_actual_}" ]; then
+    _shunit_assertPass
+  else
+    # after the shift above, "$@" holds the unexpected and actual values
+    failSame "${shunit_message_}" "$@"
+    shunit_return=${SHUNIT_FALSE}
+  fi
+
+  unset shunit_message_ shunit_unexpected_ shunit_actual_
+  return ${shunit_return}
+}
+_ASSERT_NOT_EQUALS_='eval assertNotEquals --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertNull</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>value</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that <emphasis>value</emphasis> is <literal>null</literal>,
+# or in shell terms a zero-length string. The message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Asserts that a value is the empty string: assertNull [message] value.
+assertNull()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 1 -o $# -gt 2 ]; then
+    _shunit_error "assertNull() requires one or two arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 2 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  # NOTE(review): $1 is interpolated into a single-quoted test string, so a
+  # value containing a single quote breaks the eval'd condition — upstream limitation.
+  assertTrue "${shunit_message_}" "[ -z '$1' ]"
+  shunit_return=$?
+
+  unset shunit_message_
+  return ${shunit_return}
+}
+_ASSERT_NULL_='eval assertNull --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertNotNull</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>value</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that <emphasis>value</emphasis> is <emphasis
+# role="strong">not</emphasis> <literal>null</literal>, or in shell terms not
+# a zero-length string. The message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Asserts that a value is a non-empty string: assertNotNull [message] value.
+assertNotNull()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -gt 2 ]; then  # allowing 0 arguments as $1 might actually be null
+    _shunit_error "assertNotNull() requires one or two arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 2 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  # NOTE(review): same single-quote interpolation caveat as assertNull
+  assertTrue "${shunit_message_}" "[ -n '${1:-}' ]"
+  shunit_return=$?
+
+  unset shunit_message_
+  return ${shunit_return}
+}
+_ASSERT_NOT_NULL_='eval assertNotNull --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertSame</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>expected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function is functionally equivalent to
+# <function>assertEquals</function>.</para>
+# </entry>
+# </s:function>
+#*/
+# Functionally equivalent to assertEquals: assertSame [message] expected actual.
+assertSame()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    # fixed: message previously claimed "one or two" but the guard accepts two or three
+    _shunit_error "assertSame() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  # prepend the captured line number to the optional user message
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  # delegate to assertEquals, which does the comparison and bookkeeping
+  assertEquals "${shunit_message_}" "$1" "$2"
+  shunit_return=$?
+
+  unset shunit_message_
+  return ${shunit_return}
+}
+_ASSERT_SAME_='eval assertSame --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertNotSame</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>unexpected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that <emphasis>unexpected</emphasis> and
+# <emphasis>actual</emphasis> are <emphasis role="strong">not</emphasis>
+# equal to one another. The message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Functionally equivalent to assertNotEquals: assertNotSame [message] unexpected actual.
+assertNotSame()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    _shunit_error "assertNotSame() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_:-}$1"
+    shift
+  fi
+  # delegate to assertNotEquals for the comparison and bookkeeping
+  assertNotEquals "${shunit_message_}" "$1" "$2"
+  shunit_return=$?
+
+  unset shunit_message_
+  return ${shunit_return}
+}
+_ASSERT_NOT_SAME_='eval assertNotSame --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertTrue</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>condition</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that a given shell test condition is true. The message is
+# optional.</para>
+# <para>Testing whether something is true or false is easy enough by using
+# the assertEquals/assertNotSame functions. Shell supports much more
+# complicated tests though, and a means to support them was needed. As such,
+# this function tests that conditions are true or false through evaluation
+# rather than just looking for a true or false.</para>
+# <funcsynopsis>
+# The following test will succeed: <funcsynopsisinfo>assertTrue "[ 34 -gt 23 ]"</funcsynopsisinfo>
+# The following test will fail with a message: <funcsynopsisinfo>assertTrue "test failed" "[ -r '/non/existant/file' ]"</funcsynopsisinfo>
+# </funcsynopsis>
+# </entry>
+# </s:function>
+#*/
+assertTrue()
+{
+ ${_SHUNIT_LINENO_}
+ if [ $# -gt 2 ]; then
+ _shunit_error "assertTrue() takes one two arguments; $# given"
+ return ${SHUNIT_ERROR}
+ fi
+ _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+ shunit_message_=${__shunit_lineno}
+ if [ $# -eq 2 ]; then
+ shunit_message_="${shunit_message_}$1"
+ shift
+ fi
+ shunit_condition_=$1
+
+ # see if condition is an integer, i.e. a return value
+ shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'`
+ shunit_return=${SHUNIT_TRUE}
+ if [ -z "${shunit_condition_}" ]; then
+ # null condition
+ shunit_return=${SHUNIT_FALSE}
+ elif [ "${shunit_condition_}" = "${shunit_match_}" ]; then
+ # possible return value. treating 0 as true, and non-zero as false.
+ [ ${shunit_condition_} -ne 0 ] && shunit_return=${SHUNIT_FALSE}
+ else
+ # (hopefully) a condition
+ ( eval ${shunit_condition_} ) >/dev/null 2>&1
+ [ $? -ne 0 ] && shunit_return=${SHUNIT_FALSE}
+ fi
+
+ # record the test
+ if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then
+ _shunit_assertPass
+ else
+ _shunit_assertFail "${shunit_message_}"
+ fi
+
+ unset shunit_message_ shunit_condition_ shunit_match_
+ return ${shunit_return}
+}
+_ASSERT_TRUE_='eval assertTrue --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="asserts">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>assertFalse</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>condition</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Asserts that a given shell test condition is false. The message is
+# optional.</para>
+# <para>Testing whether something is true or false is easy enough by using
+# the assertEquals/assertNotSame functions. Shell supports much more
+# complicated tests though, and a means to support them was needed. As such,
+# this function tests that conditions are true or false through evaluation
+# rather than just looking for a true or false.</para>
+# <funcsynopsis>
+# The following test will succeed: <funcsynopsisinfo>assertFalse "[ 'apples' = 'oranges' ]"</funcsynopsisinfo>
+# The following test will fail with a message: <funcsynopsisinfo>assertFalse "test failed" "[ 1 -eq 1 -a 2 -eq 2 ]"</funcsynopsisinfo>
+# </funcsynopsis>
+# </entry>
+# </s:function>
+#*/
+# Asserts that a condition is false: assertFalse [message] condition.
+# Mirror image of assertTrue: an integer condition of 0 fails, non-zero passes;
+# a shell expression passes when its evaluation returns non-zero.
+assertFalse()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 1 -o $# -gt 2 ]; then
+    # fixed: typo "quires" -> "requires"
+    _shunit_error "assertFalse() requires one or two arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 2 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  shunit_condition_=$1
+
+  # see if condition is an integer, i.e. a return value
+  shunit_match_=`expr "${shunit_condition_}" : '\([0-9]*\)'`
+  shunit_return=${SHUNIT_TRUE}
+  if [ -z "${shunit_condition_}" ]; then
+    # null condition
+    shunit_return=${SHUNIT_FALSE}
+  elif [ "${shunit_condition_}" = "${shunit_match_}" ]; then
+    # possible return value. treating 0 as true, and non-zero as false.
+    [ ${shunit_condition_} -eq 0 ] && shunit_return=${SHUNIT_FALSE}
+  else
+    # (hopefully) a condition
+    ( eval ${shunit_condition_} ) >/dev/null 2>&1
+    [ $? -eq 0 ] && shunit_return=${SHUNIT_FALSE}
+  fi
+
+  # record the test
+  if [ ${shunit_return} -eq ${SHUNIT_TRUE} ]; then
+    _shunit_assertPass
+  else
+    _shunit_assertFail "${shunit_message_}"
+  fi
+
+  unset shunit_message_ shunit_condition_ shunit_match_
+  return ${shunit_return}
+}
+_ASSERT_FALSE_='eval assertFalse --lineno "${LINENO:-}"'
+
+#-----------------------------------------------------------------------------
+# failure functions
+#
+
+#/**
+# <s:function group="failures">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>fail</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Fails the test immediately, with the optional message.</para>
+# </entry>
+# </s:function>
+#*/
+# Fails the current test immediately, with an optional message: fail [message].
+fail()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -gt 1 ]; then
+    # fixed: guard accepts zero or one argument, but message claimed "one or two"
+    _shunit_error "fail() requires zero or one arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 1 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+
+  _shunit_assertFail "${shunit_message_}"
+
+  unset shunit_message_
+  return ${SHUNIT_FALSE}
+}
+_FAIL_='eval fail --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="failures">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>failNotEquals</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>unexpected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Fails the test if <emphasis>unexpected</emphasis> and
+# <emphasis>actual</emphasis> are <emphasis role="strong">not</emphasis>
+# equal to one another. The message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Fails with an "expected:<x> but was:<y>" message: failNotEquals [message] unexpected actual.
+failNotEquals()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    # fixed: guard accepts two or three arguments, but message claimed "one or two"
+    _shunit_error "failNotEquals() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  shunit_unexpected_=$1
+  shunit_actual_=$2
+
+  _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected:<${shunit_unexpected_}> but was:<${shunit_actual_}>"
+
+  unset shunit_message_ shunit_unexpected_ shunit_actual_
+  return ${SHUNIT_FALSE}
+}
+_FAIL_NOT_EQUALS_='eval failNotEquals --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="failures">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>failSame</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Indicate test failure because arguments were the same. The message is
+# optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Fails with an "expected not same" message: failSame [message] unexpected actual.
+failSame()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    _shunit_error "failSame() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+
+  # the remaining two value arguments are intentionally not echoed
+  _shunit_assertFail "${shunit_message_:+${shunit_message_} }expected not same"
+
+  unset shunit_message_
+  return ${SHUNIT_FALSE}
+}
+_FAIL_SAME_='eval failSame --lineno "${LINENO:-}"'
+
+#/**
+# <s:function group="failures">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>failNotSame</function></funcdef>
+# <paramdef>string <parameter>[message]</parameter></paramdef>
+# <paramdef>string <parameter>expected</parameter></paramdef>
+# <paramdef>string <parameter>actual</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>Indicate test failure because arguments were not the same. The
+# message is optional.</para>
+# </entry>
+# </s:function>
+#*/
+# Fails because two values were not the same: failNotSame [message] expected actual.
+failNotSame()
+{
+  ${_SHUNIT_LINENO_}
+  if [ $# -lt 2 -o $# -gt 3 ]; then
+    # fixed: message previously named the wrong function ("failNotEquals()")
+    # and the wrong argument count
+    _shunit_error "failNotSame() requires two or three arguments; $# given"
+    return ${SHUNIT_ERROR}
+  fi
+  _shunit_shouldSkip && return ${SHUNIT_TRUE}
+
+  shunit_message_=${__shunit_lineno}
+  if [ $# -eq 3 ]; then
+    shunit_message_="${shunit_message_}$1"
+    shift
+  fi
+  # delegate to failNotEquals for the formatted failure message
+  failNotEquals "${shunit_message_}" "$1" "$2"
+  shunit_return=$?
+
+  unset shunit_message_
+  return ${shunit_return}
+}
+_FAIL_NOT_SAME_='eval failNotSame --lineno "${LINENO:-}"'
+
+#-----------------------------------------------------------------------------
+# skipping functions
+#
+
+#/**
+# <s:function group="skipping">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>startSkipping</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function forces the remaining assert and fail functions to be
+# "skipped", i.e. they will have no effect. Each function skipped will be
+# recorded so that the total of asserts and fails will not be altered.</para>
+# </entry>
+# </s:function>
+#*/
+# Turn on skip mode: subsequent asserts/fails are counted as skipped, not run.
+startSkipping()
+{
+  __shunit_skip=${SHUNIT_TRUE}
+}
+
+#/**
+# <s:function group="skipping">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>endSkipping</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function returns calls to the assert and fail functions to their
+# default behavior, i.e. they will be called.</para>
+# </entry>
+# </s:function>
+#*/
+# Turn off skip mode: asserts/fails resume normal behavior.
+endSkipping()
+{
+  __shunit_skip=${SHUNIT_FALSE}
+}
+
+#/**
+# <s:function group="skipping">
+# <entry align="right">
+# <emphasis>boolean</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>isSkipping</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function returns the state of skipping.</para>
+# </entry>
+# </s:function>
+#*/
+# Return the current skip state (SHUNIT_TRUE when skipping).
+isSkipping()
+{
+  return ${__shunit_skip}
+}
+
+#-----------------------------------------------------------------------------
+# suite functions
+#
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>suite</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function can be optionally overridden by the user in their test
+# suite.</para>
+# <para>If this function exists, it will be called when
+# <command>shunit2</command> is sourced. If it does not exist, shUnit2 will
+# search the parent script for all functions beginning with the word
+# <literal>test</literal>, and they will be added dynamically to the test
+# suite.</para>
+# </entry>
+# </s:function>
+#*/
+# Note: see _shunit_mktempFunc() for actual implementation
+# suite() { :; }
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>suite_addTest</function></funcdef>
+# <paramdef>string <parameter>function</parameter></paramdef>
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function adds a function name to the list of tests scheduled for
+# execution as part of this test suite. This function should only be called
+# from within the <function>suite()</function> function.</para>
+# </entry>
+# </s:function>
+#*/
+# Append a test function name to the suite and bump the test counter.
+# Intended to be called only from a user-defined suite() function.
+suite_addTest()
+{
+  shunit_func_=${1:-}
+
+  __shunit_suite="${__shunit_suite:+${__shunit_suite} }${shunit_func_}"
+  __shunit_testsTotal=`expr ${__shunit_testsTotal} + 1`
+
+  unset shunit_func_
+}
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>oneTimeSetUp</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function can be be optionally overridden by the user in their
+# test suite.</para>
+# <para>If this function exists, it will be called once before any tests are
+# run. It is useful to prepare a common environment for all tests.</para>
+# </entry>
+# </s:function>
+#*/
+# Note: see _shunit_mktempFunc() for actual implementation
+# oneTimeSetUp() { :; }
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>oneTimeTearDown</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function can be be optionally overridden by the user in their
+# test suite.</para>
+# <para>If this function exists, it will be called once after all tests are
+# completed. It is useful to clean up the environment after all tests.</para>
+# </entry>
+# </s:function>
+#*/
+# Note: see _shunit_mktempFunc() for actual implementation
+# oneTimeTearDown() { :; }
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>setUp</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function can be be optionally overridden by the user in their
+# test suite.</para>
+# <para>If this function exists, it will be called before each test is run.
+# It is useful to reset the environment before each test.</para>
+# </entry>
+# </s:function>
+#*/
+# Note: see _shunit_mktempFunc() for actual implementation
+# setUp() { :; }
+
+#/**
+# <s:function group="suites">
+# <entry align="right">
+# <emphasis>void</emphasis>
+# </entry>
+# <entry>
+# <funcsynopsis>
+# <funcprototype>
+# <funcdef><function>tearDown</function></funcdef>
+# <paramdef />
+# </funcprototype>
+# </funcsynopsis>
+# <para>This function can be be optionally overridden by the user in their
+# test suite.</para>
+# <para>If this function exists, it will be called after each test completes.
+# It is useful to clean up the environment after each test.</para>
+# </entry>
+# </s:function>
+#*/
+# Note: see _shunit_mktempFunc() for actual implementation
+# tearDown() { :; }
+
+#------------------------------------------------------------------------------
+# internal shUnit2 functions
+#
+
+# this function is a cross-platform temporary directory creation tool. not all
+# OSes have the mktemp function, so one is included here.
+# this function is a cross-platform temporary directory creation tool. not all
+# OSes have the mktemp function, so one is included here.
+# Prints the created directory path on stdout.
+_shunit_mktempDir()
+{
+  # try the standard mktemp function
+  ( exec mktemp -dqt shunit.XXXXXX 2>/dev/null ) && return
+
+  # the standard mktemp didn't work. doing our own.
+  # pick a randomness source in order of preference: /dev/urandom, $RANDOM, date
+  if [ -r '/dev/urandom' ]; then
+    _shunit_random_=`od -vAn -N4 -tx4 </dev/urandom |sed 's/^[^0-9a-f]*//'`
+  elif [ -n "${RANDOM:-}" ]; then
+    # $RANDOM works
+    _shunit_random_=${RANDOM}${RANDOM}${RANDOM}$$
+  else
+    # $RANDOM doesn't work
+    _shunit_date_=`date '+%Y%m%d%H%M%S'`
+    _shunit_random_=`expr ${_shunit_date_} / $$`
+  fi
+
+  _shunit_tmpDir_="${TMPDIR:-/tmp}/shunit.${_shunit_random_}"
+  # umask 077 keeps the directory private to the current user
+  ( umask 077 && mkdir "${_shunit_tmpDir_}" ) || {
+    _shunit_fatal 'could not create temporary directory! exiting'
+    exit ${SHUNIT_FALSE}
+  }
+
+  echo ${_shunit_tmpDir_}
+  unset _shunit_date_ _shunit_random_ _shunit_tmpDir_
+}
+
+# this function is here to work around issues in Cygwin
+# this function is here to work around issues in Cygwin
+# Creates no-op executable stubs for the optional user hooks so that calling
+# them always succeeds even when the user has not defined them (the tmp dir is
+# prepended to PATH in main).
+_shunit_mktempFunc()
+{
+  for _shunit_func_ in oneTimeSetUp oneTimeTearDown setUp tearDown suite; do
+    _shunit_file_="${__shunit_tmpDir}/${_shunit_func_}"
+    cat <<EOF >"${_shunit_file_}"
+#! /bin/sh
+exit ${SHUNIT_TRUE}
+EOF
+    chmod +x "${_shunit_file_}"
+  done
+
+  unset _shunit_file_
+}
+
+# Trap handler: removes the temp dir and, for INT/TERM, re-exits with the
+# conventional 128+signal status. On EXIT without a generated report, records
+# an unknown failure and emits the report before exiting with SHUNIT_ERROR.
+_shunit_cleanup()
+{
+  _shunit_name_=$1
+
+  case ${_shunit_name_} in
+    EXIT) _shunit_signal_=0 ;;
+    INT) _shunit_signal_=2 ;;
+    TERM) _shunit_signal_=15 ;;
+    *)
+      _shunit_warn "unrecognized trap value (${_shunit_name_})"
+      _shunit_signal_=0
+      ;;
+  esac
+
+  # do our work
+  rm -fr "${__shunit_tmpDir}"
+
+  # exit for all non-EXIT signals
+  if [ ${_shunit_name_} != 'EXIT' ]; then
+    _shunit_warn "trapped and now handling the (${_shunit_name_}) signal"
+    # disable EXIT trap
+    trap 0
+    # add 128 to signal and exit
+    exit `expr ${_shunit_signal_} + 128`
+  elif [ ${__shunit_reportGenerated} -eq ${SHUNIT_FALSE} ] ; then
+    _shunit_assertFail 'Unknown failure encountered running a test'
+    _shunit_generateReport
+    exit ${SHUNIT_ERROR}
+  fi
+
+  unset _shunit_name_ _shunit_signal_
+}
+
+# The actual running of the tests happens here.
+# The actual running of the tests happens here.
+# For each suite entry: reset skip state, run setUp, the test, tearDown, and
+# update the pass/fail test counters from __shunit_testSuccess.
+_shunit_execSuite()
+{
+  for _shunit_test_ in ${__shunit_suite}; do
+    __shunit_testSuccess=${SHUNIT_TRUE}
+
+    # disable skipping
+    endSkipping
+
+    # execute the per-test setup function
+    setUp
+
+    # execute the test
+    echo "${_shunit_test_}"
+    eval ${_shunit_test_}
+
+    # execute the per-test tear-down function
+    tearDown
+
+    # update stats
+    if [ ${__shunit_testSuccess} -eq ${SHUNIT_TRUE} ]; then
+      __shunit_testsPassed=`expr ${__shunit_testsPassed} + 1`
+    else
+      __shunit_testsFailed=`expr ${__shunit_testsFailed} + 1`
+    fi
+  done
+
+  unset _shunit_test_
+}
+
+# This function exits shUnit2 with the appropriate error code and OK/FAILED
+# message.
+# This function exits shUnit2 with the appropriate error code and OK/FAILED
+# message.
+# Prints the test-count summary plus an "OK"/"FAILED" line with failure and
+# skip counts, and marks the report as generated for the EXIT trap.
+_shunit_generateReport()
+{
+  _shunit_ok_=${SHUNIT_TRUE}
+
+  # if no exit code was provided one, determine an appropriate one
+  [ ${__shunit_testsFailed} -gt 0 \
+    -o ${__shunit_testSuccess} -eq ${SHUNIT_FALSE} ] \
+      && _shunit_ok_=${SHUNIT_FALSE}
+
+  echo
+  if [ ${__shunit_testsTotal} -eq 1 ]; then
+    echo "Ran ${__shunit_testsTotal} test."
+  else
+    echo "Ran ${__shunit_testsTotal} tests."
+  fi
+
+  _shunit_failures_=''
+  _shunit_skipped_=''
+  [ ${__shunit_assertsFailed} -gt 0 ] \
+    && _shunit_failures_="failures=${__shunit_assertsFailed}"
+  [ ${__shunit_assertsSkipped} -gt 0 ] \
+    && _shunit_skipped_="skipped=${__shunit_assertsSkipped}"
+
+  if [ ${_shunit_ok_} -eq ${SHUNIT_TRUE} ]; then
+    _shunit_msg_='OK'
+    [ -n "${_shunit_skipped_}" ] \
+      && _shunit_msg_="${_shunit_msg_} (${_shunit_skipped_})"
+  else
+    _shunit_msg_="FAILED (${_shunit_failures_}"
+    [ -n "${_shunit_skipped_}" ] \
+      && _shunit_msg_="${_shunit_msg_},${_shunit_skipped_}"
+    _shunit_msg_="${_shunit_msg_})"
+  fi
+
+  echo
+  echo ${_shunit_msg_}
+  __shunit_reportGenerated=${SHUNIT_TRUE}
+
+  unset _shunit_failures_ _shunit_msg_ _shunit_ok_ _shunit_skipped_
+}
+
+# Returns FALSE when not in skip mode; otherwise records a skipped assert
+# and returns TRUE so the calling assert short-circuits.
+_shunit_shouldSkip()
+{
+  [ ${__shunit_skip} -eq ${SHUNIT_FALSE} ] && return ${SHUNIT_FALSE}
+  _shunit_assertSkip
+}
+
+# Record a passing assert in the counters.
+_shunit_assertPass()
+{
+  __shunit_assertsPassed=`expr ${__shunit_assertsPassed} + 1`
+  __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+}
+
+# Record a failing assert: mark the current test failed, bump counters, and
+# echo the failure message with the ASSERT: prefix.
+_shunit_assertFail()
+{
+  _shunit_msg_=$1
+
+  __shunit_testSuccess=${SHUNIT_FALSE}
+  __shunit_assertsFailed=`expr ${__shunit_assertsFailed} + 1`
+  __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+  echo "${__SHUNIT_ASSERT_MSG_PREFIX}${_shunit_msg_}"
+
+  unset _shunit_msg_
+}
+
+# Record a skipped assert in the counters.
+_shunit_assertSkip()
+{
+  __shunit_assertsSkipped=`expr ${__shunit_assertsSkipped} + 1`
+  __shunit_assertsTotal=`expr ${__shunit_assertsTotal} + 1`
+}
+
+#------------------------------------------------------------------------------
+# main
+#
+
+# create a temporary storage location
+__shunit_tmpDir=`_shunit_mktempDir`
+
+# provide a public temporary directory for unit test scripts
+# TODO(kward): document this
+shunit_tmpDir="${__shunit_tmpDir}/tmp"
+mkdir "${shunit_tmpDir}"
+
+# setup traps to clean up after ourselves
+trap '_shunit_cleanup EXIT' 0
+trap '_shunit_cleanup INT' 2
+trap '_shunit_cleanup TERM' 15
+
+# create phantom functions to work around issues with Cygwin
+# (the stubs go first on PATH so undefined user hooks resolve to no-ops)
+_shunit_mktempFunc
+PATH="${__shunit_tmpDir}:${PATH}"
+
+# execute the oneTimeSetUp function (if it exists)
+oneTimeSetUp
+
+# execute the suite function defined in the parent test script
+# deprecated as of 2.1.0
+suite
+
+# if no suite function was defined, dynamically build a list of functions
+# by grepping the parent script for test* function definitions
+if [ -z "${__shunit_suite}" ]; then
+  shunit_funcs_=`grep "^[ \t]*test[A-Za-z0-9_]* *()" ${__SHUNIT_PARENT} \
+    |sed 's/[^A-Za-z0-9_]//g'`
+  for shunit_func_ in ${shunit_funcs_}; do
+    suite_addTest ${shunit_func_}
+  done
+fi
+unset shunit_func_ shunit_funcs_
+
+# execute the tests
+_shunit_execSuite
+
+# execute the oneTimeTearDown function (if it exists)
+oneTimeTearDown
+
+# generate the report
+_shunit_generateReport
+
+# that's it folks: exit 0 only when every test passed
+[ ${__shunit_testsFailed} -eq 0 ]
+exit $?
+
+#/**
+# </s:shelldoc>
+#*/
diff --git a/test/packages/pkg-any-a/PKGBUILD b/test/packages/pkg-any-a/PKGBUILD
new file mode 100644
index 0000000..8749a35
--- /dev/null
+++ b/test/packages/pkg-any-a/PKGBUILD
@@ -0,0 +1,12 @@
+# Minimal arch-independent fixture package for the dbscripts test suite.
+pkgname=pkg-any-a
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('any')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+# Installs a single marker file so the built package has verifiable content.
+package() {
+	install -d -m755 ${pkgdir}/usr/share/${pkgname}
+	echo 'test' > ${pkgdir}/usr/share/${pkgname}/test
+}
diff --git a/test/packages/pkg-any-b/PKGBUILD b/test/packages/pkg-any-b/PKGBUILD
new file mode 100644
index 0000000..e6a0498
--- /dev/null
+++ b/test/packages/pkg-any-b/PKGBUILD
@@ -0,0 +1,12 @@
+# Minimal arch-independent fixture package for the dbscripts test suite.
+pkgname=pkg-any-b
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('any')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+# Installs a single marker file so the built package has verifiable content.
+package() {
+	install -d -m755 ${pkgdir}/usr/share/${pkgname}
+	echo 'test' > ${pkgdir}/usr/share/${pkgname}/test
+}
diff --git a/test/packages/pkg-simple-a/Makefile b/test/packages/pkg-simple-a/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/test/packages/pkg-simple-a/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/test/packages/pkg-simple-a/PKGBUILD b/test/packages/pkg-simple-a/PKGBUILD
new file mode 100644
index 0000000..953ecfa
--- /dev/null
+++ b/test/packages/pkg-simple-a/PKGBUILD
@@ -0,0 +1,22 @@
+pkgname=pkg-simple-a
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname}
+}
diff --git a/test/packages/pkg-simple-a/test.c b/test/packages/pkg-simple-a/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/test/packages/pkg-simple-a/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/test/packages/pkg-simple-b/Makefile b/test/packages/pkg-simple-b/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/test/packages/pkg-simple-b/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/test/packages/pkg-simple-b/PKGBUILD b/test/packages/pkg-simple-b/PKGBUILD
new file mode 100644
index 0000000..95ffd09
--- /dev/null
+++ b/test/packages/pkg-simple-b/PKGBUILD
@@ -0,0 +1,22 @@
+pkgname=pkg-simple-b
+pkgver=1
+pkgrel=1
+pkgdesc="A package called ${pkgname}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname}
+}
diff --git a/test/packages/pkg-simple-b/test.c b/test/packages/pkg-simple-b/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/test/packages/pkg-simple-b/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/test/packages/pkg-split-a/Makefile b/test/packages/pkg-split-a/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/test/packages/pkg-split-a/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/test/packages/pkg-split-a/PKGBUILD b/test/packages/pkg-split-a/PKGBUILD
new file mode 100644
index 0000000..e941976
--- /dev/null
+++ b/test/packages/pkg-split-a/PKGBUILD
@@ -0,0 +1,28 @@
+pkgbase=pkg-split-a
+pkgname=('pkg-split-a1' 'pkg-split-a2')
+pkgver=1
+pkgrel=1
+pkgdesc="A split package called ${pkgbase}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package_pkg-split-a1() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]}
+}
+
+package_pkg-split-a2() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]}
+}
diff --git a/test/packages/pkg-split-a/test.c b/test/packages/pkg-split-a/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/test/packages/pkg-split-a/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/test/packages/pkg-split-b/Makefile b/test/packages/pkg-split-b/Makefile
new file mode 120000
index 0000000..50be211
--- /dev/null
+++ b/test/packages/pkg-split-b/Makefile
@@ -0,0 +1 @@
+../../src/Makefile \ No newline at end of file
diff --git a/test/packages/pkg-split-b/PKGBUILD b/test/packages/pkg-split-b/PKGBUILD
new file mode 100644
index 0000000..6ddbc45
--- /dev/null
+++ b/test/packages/pkg-split-b/PKGBUILD
@@ -0,0 +1,29 @@
+pkgbase=pkg-split-b
+pkgname=('pkg-split-b1' 'pkg-split-b2')
+pkgver=1
+pkgrel=1
+pkgdesc="A split package called ${pkgbase}"
+arch=('i686' 'x86_64')
+url='http://www.archlinux.org/'
+license=('GPL')
+
+depends=('glibc')
+makedepends=('gcc')
+source=('Makefile' 'test.c')
+md5sums=('c6cb8dcc86253355fed559416d0c8dcf'
+ '3c1e4279feb678fd9cabaccdb28e40d0')
+
+build() {
+ cd ${srcdir}
+ make
+}
+
+package_pkg-split-b1() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[0]}
+}
+
+package_pkg-split-b2() {
+ cd ${srcdir}
+ make install DESTDIR=${pkgdir} DESTBIN=${pkgname[1]}
+}
diff --git a/test/packages/pkg-split-b/test.c b/test/packages/pkg-split-b/test.c
new file mode 120000
index 0000000..ed5b5ac
--- /dev/null
+++ b/test/packages/pkg-split-b/test.c
@@ -0,0 +1 @@
+../../src/test.c \ No newline at end of file
diff --git a/test/runTest b/test/runTest
new file mode 100755
index 0000000..b8713d8
--- /dev/null
+++ b/test/runTest
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+. "$(dirname ${BASH_SOURCE[0]})/lib/common.inc"
+
+for t in "$(dirname ${BASH_SOURCE[0]})/test.d/"*.sh; do
+ l=$(basename ${t} .sh)
+ if [ -x ${t} ]; then
+ msg "Running test '${l}'"
+ ${t}
+ [ $? -ne 0 ] && die "Test '${l}' failed"
+ echo -e "\n\n\n"
+ else
+ warning "Skipping test ${l}"
+ fi
+done
diff --git a/test/src/Makefile b/test/src/Makefile
new file mode 100644
index 0000000..105b730
--- /dev/null
+++ b/test/src/Makefile
@@ -0,0 +1,5 @@
+all:
+ gcc $(CFLAGS) -o test test.c
+
+install:
+ install -D -m755 test $(DESTDIR)/usr/bin/$(DESTBIN)
diff --git a/test/src/test.c b/test/src/test.c
new file mode 100644
index 0000000..a661689
--- /dev/null
+++ b/test/src/test.c
@@ -0,0 +1,7 @@
+#include <stdio.h>
+#include <stdlib.h>
+
+int main(void) {
+ printf("Arch is the best!\n");
+ return EXIT_SUCCESS;
+}
diff --git a/test/test.d/create-filelists.sh b/test/test.d/create-filelists.sh
new file mode 100755
index 0000000..da76710
--- /dev/null
+++ b/test/test.d/create-filelists.sh
@@ -0,0 +1,110 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testCreateSimpleFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ ../cron-jobs/create-filelists
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra.files.tar.gz" | grep -q "usr/bin/${pkgbase}"; then
+ fail "usr/bin/${pkgbase} not found in ${arch}/extra.files.tar.gz"
+ fi
+ done
+ done
+}
+
+testCreateAnyFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+ ../db-update
+
+ ../cron-jobs/create-filelists
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra.files.tar.gz" | grep -q "usr/share/${pkgbase}/test"; then
+ fail "usr/share/${pkgbase}/test not found in ${arch}/extra.files.tar.gz"
+ fi
+ done
+ done
+}
+
+testCreateSplitFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local pkgname
+ local pkgnames
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ ../cron-jobs/create-filelists
+ for pkgbase in ${pkgs[@]}; do
+ pkgnames=($(source "${TMP}/svn-packages-copy/${pkgbase}/trunk/PKGBUILD"; echo ${pkgname[@]}))
+ for pkgname in ${pkgnames[@]}; do
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra.files.tar.gz" | grep -q "usr/bin/${pkgname}"; then
+ fail "usr/bin/${pkgname} not found in ${arch}/extra.files.tar.gz"
+ fi
+ done
+ done
+ done
+}
+
+
+testCleanupFileLists() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+ ../cron-jobs/create-filelists
+
+ for arch in ${arches[@]}; do
+ ../db-remove pkg-simple-a extra ${arch}
+ done
+ ../cron-jobs/create-filelists
+
+ for arch in ${arches[@]}; do
+ if ! bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra.files.tar.gz" | grep -q "usr/bin/pkg-simple-b"; then
+ fail "usr/bin/pkg-simple-b not found in ${arch}/extra.files.tar.gz"
+ fi
+ if bsdtar -xOf "${FTP_BASE}/extra/os/${arch}/extra.files.tar.gz" | grep -q "usr/bin/pkg-simple-a"; then
+ fail "usr/bin/pkg-simple-a still found in ${arch}/extra.files.tar.gz"
+ fi
+ done
+
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/db-move.sh b/test/test.d/db-move.sh
new file mode 100755
index 0000000..57cbf71
--- /dev/null
+++ b/test/test.d/db-move.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testMoveSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../db-move testing extra pkg-simple-a
+
+ for arch in ${arches[@]}; do
+ checkPackage extra pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+ checkRemovedPackage testing pkg-simple-a-1-1-${arch}.pkg.tar.xz ${arch}
+
+ checkPackage testing pkg-simple-b-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+}
+
+testMoveAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage testing ${pkgbase} any
+ done
+
+ ../db-update
+ ../db-move testing extra pkg-any-a
+
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz
+ checkRemovedAnyPackage testing pkg-any-a
+ checkAnyPackage testing pkg-any-b-1-1-any.pkg.tar.xz
+}
+
+testMoveSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+ ../db-move testing extra pkg-split-a
+
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/pkg-split-a"/*-${arch}.pkg.tar.*; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/pkg-split-b"/*-${arch}.pkg.tar.*; do
+ checkPackage testing $(basename ${pkg}) ${arch}
+ done
+ done
+
+ checkRemovedAnyPackage testing pkg-split-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/db-remove.sh b/test/test.d/db-remove.sh
new file mode 100755
index 0000000..b66466d
--- /dev/null
+++ b/test/test.d/db-remove.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testRemovePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ ../db-remove ${pkgbase} extra ${arch}
+ done
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkRemovedPackage extra ${pkgbase} ${arch}
+ done
+ done
+}
+
+testRemoveAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ ../db-remove ${pkgbase} extra any
+ done
+
+ for pkgbase in ${pkgs[@]}; do
+ checkRemovedAnyPackage extra ${pkgbase}
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/db-update.sh b/test/test.d/db-update.sh
new file mode 100755
index 0000000..5a3d01d
--- /dev/null
+++ b/test/test.d/db-update.sh
@@ -0,0 +1,159 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testAddSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ checkPackage extra ${pkgbase}-1-1-${arch}.pkg.tar.xz ${arch}
+ done
+ done
+}
+
+testAddSingleSimplePackage() {
+ releasePackage extra 'pkg-simple-a' 'i686'
+ ../db-update
+ checkPackage extra 'pkg-simple-a-1-1-i686.pkg.tar.xz' 'i686'
+}
+
+testAddAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ checkAnyPackage extra ${pkgbase}-1-1-any.pkg.tar.xz
+ done
+}
+
+testAddSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-${arch}.pkg.tar.*; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+ done
+}
+
+testUpdateAnyPackage() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ checkAnyPackage extra pkg-any-a-1-2-any.pkg.tar.xz any
+
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateAnyPackageToDifferentRepositoriesAtOnce() {
+ releasePackage extra pkg-any-a any
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage testing pkg-any-a any
+
+ ../db-update
+
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+ checkAnyPackage testing pkg-any-a-1-2-any.pkg.tar.xz any
+
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+}
+
+testUpdateSameAnyPackageToSameRepository() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+ releasePackage extra pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to the same repository should fail'; return 1)
+}
+
+testUpdateSameAnyPackageToDifferentRepositories() {
+ releasePackage extra pkg-any-a any
+ ../db-update
+ checkAnyPackage extra pkg-any-a-1-1-any.pkg.tar.xz any
+
+ releasePackage testing pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
+
+ local arch
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
+ done
+}
+
+
+testAddIncompleteSplitPackage() {
+ local arches=('i686' 'x86_64')
+ local repo='extra'
+ local pkgbase='pkg-split-a'
+ local arch
+
+ for arch in ${arches[@]}; do
+ releasePackage ${repo} ${pkgbase} ${arch}
+ done
+
+ # remove a split package to make db-update fail
+ rm "${STAGING}"/extra/${pkgbase}1-*
+
+ ../db-update >/dev/null 2>&1 && fail "db-update should fail when a split package is missing!"
+
+ for arch in ${arches[@]}; do
+ ( [ -r "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT%.tar.*}" -O | grep -q ${pkgbase}) \
+ && fail "${pkgbase} should not be in ${repo}/os/${arch}/${repo}${DBEXT%.tar.*}"
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/ftpdir-cleanup.sh b/test/test.d/ftpdir-cleanup.sh
new file mode 100755
index 0000000..e9b977b
--- /dev/null
+++ b/test/test.d/ftpdir-cleanup.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testCleanupSimplePackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove pkg-simple-a extra ${arch}
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for arch in ${arches[@]}; do
+ local pkg1="pkg-simple-a-1-1-${arch}.pkg.tar.xz"
+ checkRemovedPackage extra 'pkg-simple-a' ${arch}
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg1}" ] && fail "${repo}/os/${arch}/${pkg1} found"
+
+ local pkg2="pkg-simple-b-1-1-${arch}.pkg.tar.xz"
+ checkPackage extra ${pkg2} ${arch}
+ done
+}
+
+testCleanupAnyPackages() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch='any'
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+
+ ../db-update
+ ../db-remove pkg-any-a extra any
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ local pkg1='pkg-any-a-1-1-any.pkg.tar.xz'
+ checkRemovedAnyPackage extra 'pkg-any-a'
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg1}" ] && fail "${repo}/os/${arch}/${pkg1} found"
+
+ local pkg2="pkg-any-b-1-1-${arch}.pkg.tar.xz"
+ checkAnyPackage extra ${pkg2}
+}
+
+testCleanupSplitPackages() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ for arch in ${arches[@]}; do
+ ../db-remove ${pkgs[0]} extra ${arch}
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-${arch}.pkg.tar.*; do
+ checkRemovedPackage extra ${pkgs[0]} ${arch}
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg}" ] && fail "${PKGPOOL}/${pkg} found"
+ [ -f "${FTP_BASE}/${repo}/os/${arch}/${pkg}" ] && fail "${repo}/os/${arch}/${pkg} found"
+ done
+
+ for pkg in "${pkgdir}/${pkgs[1]}"/*-${arch}.pkg.tar.*; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/packages.sh b/test/test.d/packages.sh
new file mode 100755
index 0000000..324f73a
--- /dev/null
+++ b/test/test.d/packages.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testPackages() {
+ # TODO: namcap -r depends fails with i686 packages
+ find "${pkgdir}" -name "*${PKGEXT}" -exec namcap -e depends {} + || fail 'namcap failed'
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/pool-transition.sh b/test/test.d/pool-transition.sh
new file mode 100755
index 0000000..6d82e00
--- /dev/null
+++ b/test/test.d/pool-transition.sh
@@ -0,0 +1,152 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testMovePackagesWithoutPool() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b' 'pkg-split-a' 'pkg-split-b')
+ local pkgbase
+ local arch
+ local pkg
+ local old
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage testing ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ # transform two packages to old style layout
+ for arch in ${arches[@]}; do
+ for old in 0 2; do
+ for pkg in "${pkgdir}/${pkgs[${old}]}"/*-${arch}.pkg.tar.*; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/testing/os/${arch}/${pkg}"
+ done
+ done
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ ../db-move testing extra ${pkgs[@]}
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-${arch}.pkg.tar.*; do
+ checkPackage extra $(basename ${pkg}) ${arch}
+ done
+ checkRemovedPackage testing ${pkgbase} ${arch}
+ done
+ done
+}
+
+testUpdateAnyPackageWithoutPool() {
+ local pkgname='pkg-any-a'
+ local pkg1='pkg-any-a-1-1-any.pkg.tar.xz'
+ local pkg2='pkg-any-a-1-2-any.pkg.tar.xz'
+ local arch
+
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+ # transform two packages to old style layout
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" "${FTP_BASE}/extra/os/any"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg1}" "${FTP_BASE}/extra/os/${arch}"
+ done
+
+ pushd "${TMP}/svn-packages-copy/${pkgname}/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ extra-i686-build >/dev/null 2>&1
+ mv "${pkg2}" "${pkgdir}/${pkgname}/"
+ popd >/dev/null
+
+ releasePackage extra ${pkgname} any
+ ../db-update
+ rm -f "${pkgdir}/${pkgname}/${pkg2}"
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ checkAnyPackage extra "${pkg2}"
+
+ [ -f "${FTP_BASE}/${PKGPOOL}/${pkg1}" ] && fail "${PKGPOOL}/${pkg1} found"
+ for arch in any i686 x86_64; do
+ [ -f "${FTP_BASE}/extra/os/${arch}/${pkg1}" ] && fail "extra/os/${arch}/${pkg1} found"
+ done
+}
+
+testMoveAnyPackagesWithoutPool() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+ local arch
+ local pkg
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage testing ${pkgbase} any
+ done
+
+ ../db-update
+
+ # transform a package to old style layout
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-any.pkg.tar.*; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/testing/os/any/${pkg}"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg}" "${FTP_BASE}/testing/os/${arch}/${pkg}"
+ done
+ done
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ ../db-move testing extra ${pkgs[@]}
+
+ ../cron-jobs/ftpdir-cleanup >/dev/null
+
+ for pkgbase in ${pkgs[@]}; do
+ for pkg in "${pkgdir}/${pkgbase}"/*-any.pkg.tar.*; do
+ checkAnyPackage extra $(basename ${pkg})
+ done
+ checkRemovedAnyPackage testing ${pkgbase}
+ done
+
+ for pkg in "${pkgdir}/${pkgs[0]}"/*-any.pkg.tar.*; do
+ pkg=$(basename $pkg)
+ for arch in any i686 x86_64; do
+ [ -f "${FTP_BASE}/testing/os/${arch}/${pkg}" ] && fail "testing/os/${arch}/${pkg} found"
+ done
+ done
+}
+
+testUpdateSameAnyPackageToDifferentRepositoriesWithoutPool() {
+ local pkg
+ local arch
+
+ releasePackage extra pkg-any-a any
+ ../db-update
+
+ # transform a package to old style layout
+ for pkg in "${pkgdir}/pkg-any-a"/*-any.pkg.tar.*; do
+ pkg=$(basename $pkg)
+ mv -f "${FTP_BASE}/${PKGPOOL}/${pkg}" "${FTP_BASE}/extra/os/any/${pkg}"
+ for arch in i686 x86_64; do
+ ln -sf "../any/${pkg}" "${FTP_BASE}/extra/os/${arch}/${pkg}"
+ done
+ done
+
+ releasePackage testing pkg-any-a any
+ ../db-update >/dev/null 2>&1 && (fail 'Adding an existing package to another repository should fail'; return 1)
+
+ for arch in i686 x86_64; do
+ ( [ -r "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" ] \
+ && bsdtar -xf "${FTP_BASE}/testing/os/${arch}/testing${DBEXT%.tar.*}" -O | grep -q pkg-any-a) \
+ && fail "pkg-any-a should not be in testing/os/${arch}/testing${DBEXT%.tar.*}"
+ done
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/sourceballs.sh b/test/test.d/sourceballs.sh
new file mode 100755
index 0000000..8cba017
--- /dev/null
+++ b/test/test.d/sourceballs.sh
@@ -0,0 +1,84 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testSourceballs() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testAnySourceballs() {
+ local pkgs=('pkg-any-a' 'pkg-any-b')
+ local pkgbase
+
+ for pkgbase in ${pkgs[@]}; do
+ releasePackage extra ${pkgbase} any
+ done
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testSplitSourceballs() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-split-a' 'pkg-split-b')
+ local pkg
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+
+ ../db-update
+
+ ../cron-jobs/sourceballs
+ for pkgbase in ${pkgs[@]}; do
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/${pkgbase}-*${SRCEXT} ] && fail "source package not found!"
+ done
+}
+
+testSourceballsCleanup() {
+ local arches=('i686' 'x86_64')
+ local pkgs=('pkg-simple-a' 'pkg-simple-b')
+ local pkgbase
+ local arch
+
+ for pkgbase in ${pkgs[@]}; do
+ for arch in ${arches[@]}; do
+ releasePackage extra ${pkgbase} ${arch}
+ done
+ done
+ ../db-update
+ ../cron-jobs/sourceballs
+
+ for arch in ${arches[@]}; do
+ ../db-remove pkg-simple-a extra ${arch}
+ done
+
+ ../cron-jobs/sourceballs
+ [ -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-a-*${SRCEXT} ] && fail "source package was not removed!"
+ [ ! -r ${FTP_BASE}/${SRCPOOL}/pkg-simple-b-*${SRCEXT} ] && fail "source package not found!"
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/test/test.d/testing2x.sh b/test/test.d/testing2x.sh
new file mode 100755
index 0000000..436716d
--- /dev/null
+++ b/test/test.d/testing2x.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+curdir=$(readlink -e $(dirname $0))
+. "${curdir}/../lib/common.inc"
+
+testTesting2xAnyPackage() {
+ releasePackage core pkg-any-a any
+ ../db-update
+
+ pushd "${TMP}/svn-packages-copy/pkg-any-a/trunk/" >/dev/null
+ sed 's/pkgrel=1/pkgrel=2/g' -i PKGBUILD
+ svn commit -q -m"update pkg to pkgrel=2" >/dev/null
+ extra-i686-build >/dev/null 2>&1
+ mv pkg-any-a-1-2-any.pkg.tar.xz "${pkgdir}/pkg-any-a/"
+ popd >/dev/null
+
+ releasePackage testing pkg-any-a any
+ ../db-update
+ rm -f "${pkgdir}/pkg-any-a/pkg-any-a-1-2-any.pkg.tar.xz"
+
+ ../testing2x pkg-any-a
+
+ checkAnyPackage core pkg-any-a-1-2-any.pkg.tar.xz any
+ checkRemovedAnyPackage testing pkg-any-a
+}
+
+. "${curdir}/../lib/shunit2"
diff --git a/testing2x b/testing2x
new file mode 100755
index 0000000..54cae11
--- /dev/null
+++ b/testing2x
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+. "$(dirname $0)/db-functions"
+. "$(dirname $0)/config"
+
+if [ $# -lt 1 ]; then
+ msg "usage: $(basename $0) <pkgname|pkgbase> ..."
+ exit 1
+fi
+
+# Lock everything to reduce possibility of interfering task between the different repo-updates
+script_lock
+for repo in 'core' 'extra' 'testing'; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_lock ${repo} ${pkgarch} || exit 1
+ done
+done
+
+declare -A pkgs
+
+for pkgbase in $*; do
+ if [ ! -d "${WORKDIR}/${pkgbase}" ]; then
+ /usr/bin/svn export -q "${SVNREPO}/${pkgbase}/repos" "${WORKDIR}/${pkgbase}" >/dev/null
+
+ found_source=false
+ for pkgarch in ${ARCHES[@]} 'any'; do
+ svnrepo_from="${WORKDIR}/${pkgbase}/testing-${pkgarch}"
+ if [ -r "${svnrepo_from}/PKGBUILD" ]; then
+ found_source=true
+ break
+ fi
+ done
+ ${found_source} || die "${pkgbase} not found in [testing]"
+ found_target=false
+ for pkgarch in ${ARCHES[@]} 'any'; do
+ for repo in 'core' 'extra'; do
+ svnrepo_to="${WORKDIR}/${pkgbase}/${repo}-${pkgarch}"
+ if [ -r "${svnrepo_to}/PKGBUILD" ]; then
+ found_target=true
+ pkgs[${repo}]+="${pkgbase} "
+ break 2
+ fi
+ done
+ done
+ ${found_target} || die "${pkgbase} neither found in [core] nor [extra]"
+ fi
+done
+
+for pkgarch in ${ARCHES[@]}; do
+ repo_unlock 'testing' ${pkgarch}
+done
+for repo in 'core' 'extra'; do
+ for pkgarch in ${ARCHES[@]}; do
+ repo_unlock ${repo} ${pkgarch}
+ done
+ if [ -n "${pkgs[${repo}]}" ]; then
+ "$(dirname $0)/db-move" 'testing' "${repo}" ${pkgs[${repo}]}
+ fi
+done
+
+script_unlock