author    Luke Shumaker <lukeshu@parabola.nu>  2018-09-25 01:09:38 -0400
committer Luke Shumaker <lukeshu@parabola.nu>  2018-09-25 01:09:38 -0400
commit    18f5224b2b08cf07ccb3d000d5a083e8bcb500cb
tree      39a9ecf9058fd76293d1a14866a4186c42cead80
parent    e5a863870bdc334c026d298a2663f65c0315273e
Normalize to the indentation settings in .editorconfig
-rwxr-xr-x  cron-jobs/check_archlinux/check_packages.py  |   2
-rwxr-xr-x  cron-jobs/make_repo_torrents                  |  68
-rwxr-xr-x  db-check-package-libraries                    | 234
-rwxr-xr-x  db-check-unsigned-packages.py                 | 114
-rwxr-xr-x  make_individual_torrent                       |  10
5 files changed, 214 insertions(+), 214 deletions(-)
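
The .editorconfig referenced in the commit message is not included in this diff, so its exact settings are not visible here. As a point of reference only, a minimal .editorconfig of the kind this commit normalizes against might look like the sketch below; the concrete values (tab indentation for shell scripts, four-space indentation for Python) are illustrative assumptions, not the repository's actual configuration.

# hypothetical .editorconfig sketch -- values are assumptions, not taken from this repository
root = true

[*]
indent_style = tab

[*.py]
indent_style = space
indent_size = 4

With such a file in place, EditorConfig-aware editors apply the declared indentation automatically; the whitespace-only hunks below bring the existing scripts in line with whatever settings the repository's real .editorconfig declares.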
diff --git a/cron-jobs/check_archlinux/check_packages.py b/cron-jobs/check_archlinux/check_packages.py
index a77b7ba..d477a48 100755
--- a/cron-jobs/check_archlinux/check_packages.py
+++ b/cron-jobs/check_archlinux/check_packages.py
@@ -258,7 +258,7 @@ def get_repo_hierarchy(repo):
repo_hierarchy = {'core': ['core'], \
'extra': ['core', 'extra'], \
'community': ['core', 'extra', 'community'], \
- 'multilib': ['core', 'extra', 'community', 'multilib'] }
+ 'multilib': ['core', 'extra', 'community', 'multilib'] }
if repo in repo_hierarchy:
return repo_hierarchy[repo]
else:
diff --git a/cron-jobs/make_repo_torrents b/cron-jobs/make_repo_torrents
index 767e182..739419c 100755
--- a/cron-jobs/make_repo_torrents
+++ b/cron-jobs/make_repo_torrents
@@ -20,14 +20,14 @@
username=$( id -un )
case "${username}" in
- repo | root )
- true
- ;;
- * )
- echo "This script must be run as repo user or root user."
- echo "ByeBye!"
- exit 1
- ;;
+ repo | root )
+ true
+ ;;
+ * )
+ echo "This script must be run as repo user or root user."
+ echo "ByeBye!"
+ exit 1
+ ;;
esac
# pacman doesn't support multiple different packages of the same name,
@@ -43,28 +43,28 @@ pkgfilelist=$(mktemp)
# Find any directories that might have packages in them
find "${public_location}" -name 'os' -type 'd' |
- while read dir
- do
- # Find any packages
- find "${dir}" -regex '[^ ]+\.pkg\.tar\.xz'
- done > "${pkgfilelist}"
+ while read dir
+ do
+ # Find any packages
+ find "${dir}" -regex '[^ ]+\.pkg\.tar\.xz'
+ done > "${pkgfilelist}"
while read pkg
do
- pkg_name="${pkg##*/}"
+ pkg_name="${pkg##*/}"
- if [[ -h "${pkg}" ]] # check if it's a symbolic link
- then
- # We get the target of the symlink
- pkg=$( readlink -f "${pkg}" )
- fi
+ if [[ -h "${pkg}" ]] # check if it's a symbolic link
+ then
+ # We get the target of the symlink
+ pkg=$( readlink -f "${pkg}" )
+ fi
- # If a .torrent file does not already exist for this package, we call
- # `make_individual_torrent' to make it.
- if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
- then
- "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
- fi
+ # If a .torrent file does not already exist for this package, we call
+ # `make_individual_torrent' to make it.
+ if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]]
+ then
+ "$script_directory/make_individual_torrent" "${pkg}" "${public_location}"
+ fi
done < "${pkgfilelist}"
# For torrents older than 1 year, we check if it's package still
@@ -74,19 +74,19 @@ done < "${pkgfilelist}"
# probability.
if ! (( $(shuf -e {0..29} | head -1) ))
then
- find -H "${torrent_location}" -mtime +365 -name '*.torrent' -type f |
- while read oldtorrent
- do
- oldtorrentnm="${oldtorrent##*/}"
- correspackagenm="${oldtorrentnm%.torrent}"
-
- grep "${correspackagenm}" "${pkgfilelist}" &> /dev/null || rm "${oldtorrent}"
- done
+ find -H "${torrent_location}" -mtime +365 -name '*.torrent' -type f |
+ while read oldtorrent
+ do
+ oldtorrentnm="${oldtorrent##*/}"
+ correspackagenm="${oldtorrentnm%.torrent}"
+
+ grep "${correspackagenm}" "${pkgfilelist}" &> /dev/null || rm "${oldtorrent}"
+ done
fi
if [[ "${username}" == root ]]
then
- chown repo *
+ chown repo *
fi
rm -f "${pkgfilelist}"
diff --git a/db-check-package-libraries b/db-check-package-libraries
index e24b58e..72b7db9 100755
--- a/db-check-package-libraries
+++ b/db-check-package-libraries
@@ -42,9 +42,9 @@ _DYNAMIC = re.compile(r"^\s*[0-9a-fx]+"
def make_db(path):
- """Make a new, empty, library database at *path*."""
- con = sqlite3.connect(path)
- con.executescript("""
+ """Make a new, empty, library database at *path*."""
+ con = sqlite3.connect(path)
+ con.executescript("""
create table provided(
library varchar not null,
package varchar not null
@@ -54,150 +54,150 @@ create table used(
package varchar not null
);
""")
- con.close()
+ con.close()
def begin(database):
- """Connect to *database* and start a transaction."""
- con = sqlite3.connect(database)
- con.execute("begin exclusive")
- return con
+ """Connect to *database* and start a transaction."""
+ con = sqlite3.connect(database)
+ con.execute("begin exclusive")
+ return con
def add_provided(con, package, libraries):
- """Write that *package* provides *libraries*."""
- for library in libraries:
- con.execute("insert into provided (package, library) values (?,?)",
- (package, library))
+ """Write that *package* provides *libraries*."""
+ for library in libraries:
+ con.execute("insert into provided (package, library) values (?,?)",
+ (package, library))
def add_used(con, package, libraries):
- """Write that *package* uses *libraries*."""
- for library in libraries:
- con.execute("insert into used (package, library) values (?,?)",
- (package, library))
+ """Write that *package* uses *libraries*."""
+ for library in libraries:
+ con.execute("insert into used (package, library) values (?,?)",
+ (package, library))
def remove_package(con, package):
- """Remove all entries for a package."""
- con.execute("delete from provided where package=?", (package,))
- con.execute("delete from used where package=?", (package,))
+ """Remove all entries for a package."""
+ con.execute("delete from provided where package=?", (package,))
+ con.execute("delete from used where package=?", (package,))
def add_package(con, package):
- """Add entries from a named *package*."""
- # Extract to a temporary directory. This could be done more
- # efficiently, since there is no need to store more than one file
- # at once.
- print("adding package:", package)
- with tempfile.TemporaryDirectory(None, "db-check-package-libraries."+os.path.basename(package)+".") as temp:
- subprocess.Popen(("bsdtar", "xf", package, "-C", temp)).communicate()
- subprocess.Popen(('find', temp,
- '-type', 'd',
- '(', '-not', '-readable', '-o', '-not', '-executable', ')',
- '-exec', 'chmod', '755', '--', '{}', ';')).communicate()
- subprocess.Popen(('find', temp,
- '-type', 'f',
- '-not', '-readable',
- '-exec', 'chmod', '644', '--', '{}', ';')).communicate()
- with open(os.path.join(temp, ".PKGINFO")) as pkginfo:
- for line in pkginfo:
- if line.startswith("pkgname ="):
- pkgname = line[len("pkgname ="):].strip()
- break
- # Don't list previously removed libraries.
- remove_package(con, pkgname)
- provided = set()
- used = set()
- # Search for ELFs.
- for dirname, dirnames, filenames in os.walk(temp):
- assert dirnames is not None # unused, avoid pylint warning
- for file_name in filenames:
- path = os.path.join(dirname, file_name)
- if os.path.islink(path) or not os.path.isfile(path):
- continue
- with open(path, "rb") as file_object:
- if file_object.read(4) != b"\177ELF":
- continue
- readelf = subprocess.Popen(("readelf", "-d", path),
- stdout=subprocess.PIPE)
- for line in readelf.communicate()[0].split(b"\n"):
- match = _DYNAMIC.match(line.decode("ascii"))
- if match:
- if match.group(1) == "SONAME":
- provided.add(match.group(2))
- elif match.group(1) == "NEEDED":
- used.add(match.group(2))
- else:
- raise AssertionError("unknown entry type "
- + match.group(1))
- add_provided(con, pkgname, provided)
- add_used(con, pkgname, used)
+ """Add entries from a named *package*."""
+ # Extract to a temporary directory. This could be done more
+ # efficiently, since there is no need to store more than one file
+ # at once.
+ print("adding package:", package)
+ with tempfile.TemporaryDirectory(None, "db-check-package-libraries."+os.path.basename(package)+".") as temp:
+ subprocess.Popen(("bsdtar", "xf", package, "-C", temp)).communicate()
+ subprocess.Popen(('find', temp,
+ '-type', 'd',
+ '(', '-not', '-readable', '-o', '-not', '-executable', ')',
+ '-exec', 'chmod', '755', '--', '{}', ';')).communicate()
+ subprocess.Popen(('find', temp,
+ '-type', 'f',
+ '-not', '-readable',
+ '-exec', 'chmod', '644', '--', '{}', ';')).communicate()
+ with open(os.path.join(temp, ".PKGINFO")) as pkginfo:
+ for line in pkginfo:
+ if line.startswith("pkgname ="):
+ pkgname = line[len("pkgname ="):].strip()
+ break
+ # Don't list previously removed libraries.
+ remove_package(con, pkgname)
+ provided = set()
+ used = set()
+ # Search for ELFs.
+ for dirname, dirnames, filenames in os.walk(temp):
+ assert dirnames is not None # unused, avoid pylint warning
+ for file_name in filenames:
+ path = os.path.join(dirname, file_name)
+ if os.path.islink(path) or not os.path.isfile(path):
+ continue
+ with open(path, "rb") as file_object:
+ if file_object.read(4) != b"\177ELF":
+ continue
+ readelf = subprocess.Popen(("readelf", "-d", path),
+ stdout=subprocess.PIPE)
+ for line in readelf.communicate()[0].split(b"\n"):
+ match = _DYNAMIC.match(line.decode("ascii"))
+ if match:
+ if match.group(1) == "SONAME":
+ provided.add(match.group(2))
+ elif match.group(1) == "NEEDED":
+ used.add(match.group(2))
+ else:
+ raise AssertionError("unknown entry type "
+ + match.group(1))
+ add_provided(con, pkgname, provided)
+ add_used(con, pkgname, used)
def init(arguments):
- """Initialize."""
- make_db(arguments.database)
+ """Initialize."""
+ make_db(arguments.database)
def add(arguments):
- """Add packages."""
- con = begin(arguments.database)
- for package in arguments.packages:
- add_package(con, package)
- con.commit()
- con.close()
+ """Add packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ add_package(con, package)
+ con.commit()
+ con.close()
def remove(arguments):
- """Remove packages."""
- con = begin(arguments.database)
- for package in arguments.packages:
- remove_package(con, package)
- con.commit()
- con.close()
+ """Remove packages."""
+ con = begin(arguments.database)
+ for package in arguments.packages:
+ remove_package(con, package)
+ con.commit()
+ con.close()
def check(arguments):
- """List broken packages."""
- con = begin(arguments.database)
- available = set(row[0] for row
- in con.execute("select library from provided"))
- for package, library in con.execute("select package, library from used"):
- if library not in available:
- print(package, "needs", library)
- con.close()
+ """List broken packages."""
+ con = begin(arguments.database)
+ available = set(row[0] for row
+ in con.execute("select library from provided"))
+ for package, library in con.execute("select package, library from used"):
+ if library not in available:
+ print(package, "needs", library)
+ con.close()
def main():
- """Get arguments and run the command."""
- from argparse import ArgumentParser
- parser = ArgumentParser(prog="db-check-package-libraries",
- description="Check packages for "
- "provided/needed libraries")
- parser.add_argument("-d", "--database", type=str,
- help="Database file to use",
- default="package-libraries.sqlite")
- subparsers = parser.add_subparsers()
- subparser = subparsers.add_parser(name="init",
- help="initialize the database")
- subparser.set_defaults(command=init)
- subparser = subparsers.add_parser(name="add",
- help="add packages to database")
- subparser.add_argument("packages", nargs="+", type=str,
- help="package files to add")
- subparser.set_defaults(command=add)
- subparser = subparsers.add_parser(name="remove",
- help="remove packages from database")
- subparser.add_argument("packages", nargs="+", type=str,
- help="package names to remove")
- subparser.set_defaults(command=remove)
- subparser = subparsers.add_parser(name="check",
- help="list broken packages")
- subparser.set_defaults(command=check)
- arguments = parser.parse_args()
- arguments.command(arguments)
+ """Get arguments and run the command."""
+ from argparse import ArgumentParser
+ parser = ArgumentParser(prog="db-check-package-libraries",
+ description="Check packages for "
+ "provided/needed libraries")
+ parser.add_argument("-d", "--database", type=str,
+ help="Database file to use",
+ default="package-libraries.sqlite")
+ subparsers = parser.add_subparsers()
+ subparser = subparsers.add_parser(name="init",
+ help="initialize the database")
+ subparser.set_defaults(command=init)
+ subparser = subparsers.add_parser(name="add",
+ help="add packages to database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package files to add")
+ subparser.set_defaults(command=add)
+ subparser = subparsers.add_parser(name="remove",
+ help="remove packages from database")
+ subparser.add_argument("packages", nargs="+", type=str,
+ help="package names to remove")
+ subparser.set_defaults(command=remove)
+ subparser = subparsers.add_parser(name="check",
+ help="list broken packages")
+ subparser.set_defaults(command=check)
+ arguments = parser.parse_args()
+ arguments.command(arguments)
if __name__ == "__main__":
- main()
+ main()
diff --git a/db-check-unsigned-packages.py b/db-check-unsigned-packages.py
index 80cff51..625c0e8 100755
--- a/db-check-unsigned-packages.py
+++ b/db-check-unsigned-packages.py
@@ -34,63 +34,63 @@ import tarfile
def main():
- """Do the job."""
- check_keys = False
- if "--keyset" in sys.argv:
- sys.argv.remove("--keyset")
- check_keys = True
- repo = sys.argv[1]
- pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:])
- packages = []
- keys = []
- with tarfile.open(fileobj=sys.stdin.buffer) as archive:
- for entry in archive:
- if entry.name.endswith("/desc"):
- content = archive.extractfile(entry)
- skip = False
- is_arch = False
- key = None
- for line in content:
- if is_arch:
- is_arch = False
- if pkgarches and line.strip() not in pkgarches:
- skip = True # different architecture
- break
- if line == b"%PGPSIG%\n":
- skip = True # signed
- key = b""
- if check_keys:
- continue
- else:
- break
- if line == b"%ARCH%\n":
- is_arch = True
- continue
- if key is not None:
- if line.strip():
- key += line.strip()
- else:
- break
- if check_keys and key:
- key_binary = base64.b64decode(key)
- keys.append(key_binary)
- packages.append(repo + "/" + entry.name[:-5])
- if skip:
- continue
- print(repo + "/" + entry.name[:-5])
- if check_keys and keys:
- # We have collected all signed package names in packages and
- # all keys in keys. Let's now ask gpg to list all signatures
- # and find which keys made them.
- packets = subprocess.check_output(("gpg", "--list-packets"),
- input=b"".join(keys))
- i = 0
- for line in packets.decode("latin1").split("\n"):
- if line.startswith(":signature packet:"):
- keyid = line[line.index("keyid ") + len("keyid "):]
- print(packages[i], keyid)
- i += 1
+ """Do the job."""
+ check_keys = False
+ if "--keyset" in sys.argv:
+ sys.argv.remove("--keyset")
+ check_keys = True
+ repo = sys.argv[1]
+ pkgarches = frozenset(name.encode("utf-8") for name in sys.argv[2:])
+ packages = []
+ keys = []
+ with tarfile.open(fileobj=sys.stdin.buffer) as archive:
+ for entry in archive:
+ if entry.name.endswith("/desc"):
+ content = archive.extractfile(entry)
+ skip = False
+ is_arch = False
+ key = None
+ for line in content:
+ if is_arch:
+ is_arch = False
+ if pkgarches and line.strip() not in pkgarches:
+ skip = True # different architecture
+ break
+ if line == b"%PGPSIG%\n":
+ skip = True # signed
+ key = b""
+ if check_keys:
+ continue
+ else:
+ break
+ if line == b"%ARCH%\n":
+ is_arch = True
+ continue
+ if key is not None:
+ if line.strip():
+ key += line.strip()
+ else:
+ break
+ if check_keys and key:
+ key_binary = base64.b64decode(key)
+ keys.append(key_binary)
+ packages.append(repo + "/" + entry.name[:-5])
+ if skip:
+ continue
+ print(repo + "/" + entry.name[:-5])
+ if check_keys and keys:
+ # We have collected all signed package names in packages and
+ # all keys in keys. Let's now ask gpg to list all signatures
+ # and find which keys made them.
+ packets = subprocess.check_output(("gpg", "--list-packets"),
+ input=b"".join(keys))
+ i = 0
+ for line in packets.decode("latin1").split("\n"):
+ if line.startswith(":signature packet:"):
+ keyid = line[line.index("keyid ") + len("keyid "):]
+ print(packages[i], keyid)
+ i += 1
if __name__ == "__main__":
- main()
+ main()
diff --git a/make_individual_torrent b/make_individual_torrent
index 0200e9a..d44bb24 100755
--- a/make_individual_torrent
+++ b/make_individual_torrent
@@ -30,14 +30,14 @@ seed_url='http://repo.parabolagnulinux.org/'
if [[ -z "${1}" ]]
then
- echo "Error. First arg must be the path of the package."
- echo 1
+ echo "Error. First arg must be the path of the package."
+ echo 1
fi
if [[ -z "${2}" ]]
then
- echo "Error. Second arg must be the public location."
- echo 1
+ echo "Error. Second arg must be the public location."
+ echo 1
fi
pkg="${1}"
@@ -49,4 +49,4 @@ pkg_name="${pkg##*/}"
webseed="${seed_url}${pkg#${public_location}}"
mktorrent -a "${trackers}" "${pkg}" -w "${webseed}" >/dev/null ||
-echo "Error making torrent for \"${pkg}\""
+ echo "Error making torrent for \"${pkg}\""