From b878c56002c5777dcbf6d713b0002aead5169286 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?coadde=20=5BM=C3=A1rcio=20Alexandre=20Silva=20Delgado=5D?=
Date: Wed, 26 Aug 2015 04:39:17 -0300
Subject: remove more duplicated files

---
 .../cron-jobs/check_archlinux/check_packages.py | 508 ---------------------
 .../cron-jobs/check_archlinux/parse_pkgbuilds.sh | 153 -------
 extra/lukeshu-xbs/cron-jobs/devlist-mailer | 27 --
 extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup | 96 ----
 extra/lukeshu-xbs/cron-jobs/integrity-check | 32 --
 extra/lukeshu-xbs/cron-jobs/make_repo_torrents | 70 ---
 extra/lukeshu-xbs/cron-jobs/sourceballs | 150 ------
 extra/lukeshu-xbs/cron-jobs/sourceballs.force | 4 -
 extra/lukeshu-xbs/cron-jobs/sourceballs.skip | 43 --
 9 files changed, 1083 deletions(-)
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/devlist-mailer
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/integrity-check
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/make_repo_torrents
 delete mode 100755 extra/lukeshu-xbs/cron-jobs/sourceballs
 delete mode 100644 extra/lukeshu-xbs/cron-jobs/sourceballs.force
 delete mode 100644 extra/lukeshu-xbs/cron-jobs/sourceballs.skip

(limited to 'extra/lukeshu-xbs/cron-jobs')

diff --git a/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py b/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
deleted file mode 100755
index ac0194f..0000000
--- a/extra/lukeshu-xbs/cron-jobs/check_archlinux/check_packages.py
+++ /dev/null
@@ -1,508 +0,0 @@
-#!/usr/bin/env python2
-#
-# check_archlinux.py
-#
-# Original script by Scott Horowitz
-# Rewritten by Xavier Chantry
-#
-# This script currently checks for a number of issues in your ABS tree:
-# 1. Directories with missing PKGBUILDS
-# 2. Invalid PKGBUILDs (bash syntax error for instance)
-# 3. PKGBUILD names that don't match their directory
-# 4. Duplicate PKGBUILDs
-# 5. Valid arch's in PKGBUILDS
-# 6. Missing (make-)dependencies
-# 7. Hierarchy of repos (e.g., that a core package doesn't depend on
-#    a non-core package)
-# 8.
Circular dependencies - -import os,re,commands,getopt,sys,tarfile -import pdb - -import ctypes -_alpm = ctypes.cdll.LoadLibrary("libalpm.so") - -DBEXT='.db.tar.gz' - -packages = {} # pkgname : PacmanPackage -repopkgs = {} # pkgname : PacmanPackage -provisions = {} # provision : PacmanPackage -pkgdeps,makepkgdeps = {},{} # PacmanPackage : list of the PacmanPackage dependencies -invalid_pkgbuilds = [] -missing_pkgbuilds = [] -dups = [] - -dbonly = [] -absonly = [] - -mismatches = [] -missing_deps = [] -missing_makedeps = [] -invalid_archs = [] -dep_hierarchy = [] -makedep_hierarchy = [] -circular_deps = [] # pkgname>dep1>dep2>...>pkgname -checked_deps = [] - -class PacmanPackage: - def __init__(self): - self.name,self.version = "","" - self.base = "" - self.path,self.repo = "","" - self.deps,self.makedeps = [],[] - self.provides,self.conflicts = [],[] - self.archs = [] - -class Depend: - def __init__(self,name,version,mod): - self.name = name - self.version = version - self.mod = mod - -def parse_pkgbuilds(repos,arch): - for absroot in absroots: - for repo in repos: - cmd = os.path.dirname(os.path.realpath(sys.argv[0])) + '/parse_pkgbuilds.sh ' - cmd += arch + ' ' + absroot + '/' + repo - (status,output) = commands.getstatusoutput(cmd) - if status != 0: - print "Error : failed to run '%s'" % cmd - sys.exit() - parse_data(repo,output) - -def parse_data(repo,data): - attrname = None - - for line in data.split('\n'): - if line.startswith('%'): - attrname = line.strip('%').lower() - elif line.strip() == '': - attrname = None - elif attrname == "invalid": - if repo in repos: - invalid_pkgbuilds.append(line) - elif attrname == "missing": - if repo in repos: - missing_pkgbuilds.append(line) - elif attrname == "name": - pkg = PacmanPackage() - pkg.name = line - pkg.repo = repo - dup = None - if pkg.name in packages: - dup = packages[pkg.name] - else: - packages[pkg.name] = pkg - elif attrname == "base": - pkg.base = line - elif attrname == "version": - pkg.version = line - elif attrname == "path": - pkg.path = line - if dup != None and (pkg.repo in repos or dup.repo in repos): - dups.append(pkg.path + " vs. 
" + dup.path) - elif attrname == "arch": - pkg.archs.append(line) - elif attrname == "depends": - pkg.deps.append(line) - elif attrname == "makedepends": - pkg.makedeps.append(line) - elif attrname == "conflicts": - pkg.conflicts.append(line) - elif attrname == "provides": - pkg.provides.append(line) - -def parse_dbs(repos,arch): - dbpkgs = {} - for repo in repos: - pkgs = set([]) - db = tarfile.open(os.path.join(repodir,repo,'os',arch,repo + DBEXT)) - for line in db.getnames(): - if not '/' in line: - pkgs.add(line.rsplit('-',2)[0]) - dbpkgs[repo] = pkgs - return(dbpkgs) - -def splitdep(dep): - name = dep - version = "" - mod = "" - for char in (">=", "<=", "=", ">", "<"): - pos = dep.find(char) - if pos > -1: - name = dep[:pos] - version = dep[pos:].replace(char, "") - mod = char - break - return Depend(name,version,mod) - -def splitprov(prov): - name = prov - version = "" - pos = prov.find("=") - if pos > -1: - name = prov[:pos] - version = prov[pos:].replace("=", "") - return (name,version) - -def vercmp(v1,mod,v2): - """ - >>> vercmp("1.0", "<=", "2.0") - True - >>> vercmp("1:1.0", ">", "2.0") - True - >>> vercmp("1.0.2", ">=", "2.1.0") - False - """ - s1 = ctypes.c_char_p(v1) - s2 = ctypes.c_char_p(v2) - res = _alpm.alpm_pkg_vercmp(s1,s2) - if res == 0: - return (mod.find("=") > -1) - elif res < 0: - return (mod.find("<") > -1) - elif res > 0: - return (mod.find(">") > -1) - return False - - -def depcmp(name,version,dep): - if name != dep.name: - return False - if dep.version == "" or dep.mod == "": - return True - if version == "": - return False - return vercmp(version,dep.mod,dep.version) - -def provcmp(pkg,dep): - for prov in pkg.provides: - (provname,provver) = splitprov(prov) - if depcmp(provname,provver,dep): - return True - return False - -def verify_dep(dep): - dep = splitdep(dep) - if dep.name in packages: - pkg = packages[dep.name] - if depcmp(pkg.name,pkg.version,dep): - return [pkg] - if dep.name in provisions: - provlist = provisions[dep.name] - results = [] - for prov in provlist: - if provcmp(prov,dep): - results.append(prov) - return results - return [] - -def verify_deps(name,repo,deps): - pkg_deps = [] - missdeps = [] - hierarchy = [] - for dep in deps: - pkglist = verify_dep(dep) - if pkglist == []: - missdeps.append(repo + "/" + name + " --> '" + dep + "'") - else: - valid_repos = get_repo_hierarchy(repo) - pkgdep = None - for pkg in pkglist: - if pkg.repo in valid_repos: - pkgdep = pkg - break - if not pkgdep: - pkgdep = pkglist[0] - hierarchy.append((repo,name,pkgdep)) - - pkg_deps.append(pkgdep) - - return (pkg_deps,missdeps,hierarchy) - -def compute_deplist(pkg): - list = [] - stack = [pkg] - while stack != []: - dep = stack.pop() - if dep in pkgdeps: - for dep2 in pkgdeps[dep]: - if dep2 not in list: - list.append(dep2) - stack.append(dep2) - if dep in makepkgdeps: - for dep2 in makepkgdeps[dep]: - if dep2 not in list: - list.append(dep2) - stack.append(dep2) - return list - -def check_hierarchy(deph): - hierarchy = [] - for (repo,name,pkgdep) in deph: - deplist = compute_deplist(pkgdep) - valid_repos = get_repo_hierarchy(repo) - extdeps = [] - for dep in deplist: - if dep.repo not in valid_repos: - extdeps.append(dep.name) - string = repo + "/" + name + " depends on " + pkgdep.repo + "/" + pkgdep.name + " (" - string += "%s extra (make)deps to pull" % len(extdeps) - if 0 < len(extdeps) < 10: - string += " : " + ' '.join(extdeps) - string += ")" - hierarchy.append(string) - return hierarchy - -def get_repo_hierarchy(repo): - repo_hierarchy = {'core': 
['core'], \ - 'extra': ['core', 'extra'], \ - 'community': ['core', 'extra', 'community'], \ - 'multilib': ['core', 'extra', 'community', 'multilib'] } - if repo in repo_hierarchy: - return repo_hierarchy[repo] - else: - return ['core','extra','community'] - -def verify_archs(name,repo,archs): - valid_archs = ['any', 'i686', 'x86_64'] - invalid_archs = [] - for arch in archs: - if arch not in valid_archs: - invalid_archs.append(repo + "/" + name + " --> " + arch) - return invalid_archs - -def find_scc(packages): - # reset all variables - global index,S,pkgindex,pkglowlink - index = 0 - S = [] - pkgindex = {} - pkglowlink = {} - cycles = [] - for pkg in packages: - tarjan(pkg) - -def tarjan(pkg): - global index,S,pkgindex,pkglowlink,cycles - pkgindex[pkg] = index - pkglowlink[pkg] = index - index += 1 - checked_deps.append(pkg) - S.append(pkg) - deps = [] - if pkg in pkgdeps: - deps = pkgdeps[pkg] - for dep in deps: - if dep not in pkgindex: - tarjan(dep) - pkglowlink[pkg] = min(pkglowlink[pkg],pkglowlink[dep]) - elif dep in S: - pkglowlink[pkg] = min(pkglowlink[pkg],pkgindex[dep]) - if pkglowlink[pkg] == pkgindex[pkg]: - dep = S.pop() - if pkg == dep: - return - path = pkg.name - while pkg != dep: - path = dep.repo + "/" + dep.name + ">" + path - dep = S.pop() - path = dep.name + ">" + path - if pkg.repo in repos: - circular_deps.append(path) - -def print_heading(heading): - print "" - print "=" * (len(heading) + 4) - print "= " + heading + " =" - print "=" * (len(heading) + 4) - -def print_subheading(subheading): - print "" - print subheading - print "-" * (len(subheading) + 2) - -def print_missdeps(pkgname,missdeps) : - for d in missdeps: - print pkgname + " : " + d - -def print_result(list, subheading): - if len(list) > 0: - list.sort() - print_subheading(subheading) - for item in list: - print item - -def print_results(): - print_result(missing_pkgbuilds, "Missing PKGBUILDs") - print_result(invalid_pkgbuilds, "Invalid PKGBUILDs") - print_result(mismatches, "Mismatched Pkgnames") - print_result(dups, "Duplicate PKGBUILDs") - print_result(invalid_archs, "Invalid Archs") - print_result(missing_deps, "Missing Dependencies") - print_result(missing_makedeps, "Missing Makedepends") - print_result(dep_hierarchy, "Repo Hierarchy for Dependencies") - print_result(makedep_hierarchy, "Repo Hierarchy for Makedepends") - print_result(circular_deps, "Circular Dependencies") - print_result(dbonly, "Packages found in db, but not in tree") - print_result(absonly,"Packages found in tree, but not in db") - print_subheading("Summary") - print "Missing PKGBUILDs: ", len(missing_pkgbuilds) - print "Invalid PKGBUILDs: ", len(invalid_pkgbuilds) - print "Mismatching PKGBUILD names: ", len(mismatches) - print "Duplicate PKGBUILDs: ", len(dups) - print "Invalid archs: ", len(invalid_archs) - print "Missing (make)dependencies: ", len(missing_deps)+len(missing_makedeps) - print "Repo hierarchy problems: ", len(dep_hierarchy)+len(makedep_hierarchy) - print "Circular dependencies: ", len(circular_deps) - print "In db, but not in tree: ", len(dbonly) - print "In tree, but not in db: ", len(absonly) - print "" - -def print_usage(): - print "" - print "Usage: ./check_packages.py [OPTION]" - print "" - print "Options:" - print " --abs-tree= Check the specified tree(s) (default : /var/abs)" - print " --repos= Check the specified repos (default : core,extra)" - print " --arch= Check the specified arch (default : i686)" - print " --repo-dir= Check the dbs at the specified path (default : /srv/ftp)" - print " -h, --help Show 
this help and exit" - print "" - print "Examples:" - print "\n Check core and extra in existing abs tree:" - print " ./check_packages.py --abs-tree=/var/abs --repos=core,extra --arch=i686" - print "\n Check community:" - print " ./check_packages.py --abs-tree=/var/abs --repos=community --arch=i686" - print "" - -if __name__ == "__main__": - ## Default path to the abs root directory - absroots = ["/var/abs"] - ## Default list of repos to check - repos = ['core', 'extra'] - ## Default arch - arch = "i686" - ## Default repodir - repodir = "/srv/ftp" - - try: - opts, args = getopt.getopt(sys.argv[1:], "", ["abs-tree=", "repos=", - "arch=", "repo-dir="]) - except getopt.GetoptError: - print_usage() - sys.exit() - if opts != []: - for o, a in opts: - if o in ("--abs-tree"): - absroots = a.split(',') - elif o in ("--repos"): - repos = a.split(",") - elif o in ("--arch"): - arch = a - elif o in ("--repo-dir"): - repodir = a - else: - print_usage() - sys.exit() - if args != []: - print_usage() - sys.exit() - - for absroot in absroots: - if not os.path.isdir(absroot): - print "Error : the abs tree " + absroot + " does not exist" - sys.exit() - for repo in repos: - repopath = absroot + "/" + repo - if not os.path.isdir(repopath): - print("Warning : the repository " + repo + " does not exist in " + absroot) - - if not os.path.isdir(repodir): - print "Error: the repository directory %s does not exist" % repodir - sys.exit() - for repo in repos: - path = os.path.join(repodir,repo,'os',arch,repo + DBEXT) - if not os.path.isfile(path): - print "Error : repo DB %s : File not found" % path - sys.exit() - if not tarfile.is_tarfile(path): - print "Error : Cant open repo DB %s, not a valid tar file" % path - sys.exit() - # repos which need to be loaded - loadrepos = set([]) - for repo in repos: - loadrepos = loadrepos | set(get_repo_hierarchy(repo)) - - print_heading("Integrity Check " + arch + " of " + ",".join(repos)) - print("\nPerforming integrity checks...") - - print("==> parsing pkgbuilds") - parse_pkgbuilds(loadrepos,arch) - - # fill provisions - for name,pkg in packages.iteritems(): - for prov in pkg.provides: - provname=prov.split("=")[0] - if provname not in provisions: - provisions[provname] = [] - provisions[provname].append(pkg) - - # fill repopkgs - for name,pkg in packages.iteritems(): - if pkg.repo in repos: - repopkgs[name] = pkg - - print("==> parsing db files") - dbpkgs = parse_dbs(repos,arch) - - print("==> checking mismatches") - for name,pkg in repopkgs.iteritems(): - pkgdirname = pkg.path.split("/")[-1] - if name != pkgdirname and pkg.base != pkgdirname: - mismatches.append(name + " vs. 
" + pkg.path) - - print("==> checking archs") - for name,pkg in repopkgs.iteritems(): - archs = verify_archs(name,pkg.repo,pkg.archs) - invalid_archs.extend(archs) - - deph,makedeph = [],[] - - print("==> checking dependencies") - for name,pkg in repopkgs.iteritems(): - (deps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.deps) - pkgdeps[pkg] = deps - missing_deps.extend(missdeps) - deph.extend(hierarchy) - - print("==> checking makedepends") - for name,pkg in repopkgs.iteritems(): - (makedeps,missdeps,hierarchy) = verify_deps(name,pkg.repo,pkg.makedeps) - makepkgdeps[pkg] = makedeps - missing_makedeps.extend(missdeps) - makedeph.extend(hierarchy) - - print("==> checking hierarchy") - dep_hierarchy = check_hierarchy(deph) - makedep_hierarchy = check_hierarchy(makedeph) - - print("==> checking for circular dependencies") - # make sure pkgdeps is filled for every package - for name,pkg in packages.iteritems(): - if pkg not in pkgdeps: - (deps,missdeps,_) = verify_deps(name,pkg.repo,pkg.deps) - pkgdeps[pkg] = deps - find_scc(repopkgs.values()) - - print("==> checking for differences between db files and pkgbuilds") - for repo in repos: - for pkg in dbpkgs[repo]: - if not (pkg in repopkgs and repopkgs[pkg].repo == repo): - dbonly.append("%s/%s" % (repo,pkg)) - for name,pkg in repopkgs.iteritems(): - if not name in dbpkgs[pkg.repo]: - absonly.append("%s/%s" % (pkg.repo,name)) - - print_results() diff --git a/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh b/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh deleted file mode 100755 index b857ac8..0000000 --- a/extra/lukeshu-xbs/cron-jobs/check_archlinux/parse_pkgbuilds.sh +++ /dev/null @@ -1,153 +0,0 @@ -#!/bin/bash - -# Usage : parse_pkgbuilds.sh arch -# Example : parse_pkgbuilds.sh i686 /var/abs/core /var/abs/extra - -exit() { return; } - -splitpkg_overrides=('depends' 'optdepends' 'provides' 'conflicts') -variables=('pkgname' 'pkgbase' 'epoch' 'pkgver' 'pkgrel' 'makedepends' 'arch' "${splitpkg_overrides[@]}") -readonly -a variables splitpkg_overrides - -backup_package_variables() { - for var in "${splitpkg_overrides[@]}"; do - indirect="${var}_backup" - eval "${indirect}=(\"\${$var[@]}\")" - done -} - -restore_package_variables() { - for var in "${splitpkg_overrides[@]}"; do - indirect="${var}_backup" - if [ -n "${!indirect}" ]; then - eval "${var}=(\"\${$indirect[@]}\")" - else - unset "${var}" - fi - done -} - -print_info() { - echo -e "%NAME%\n$pkgname\n" - if [ -n "$epoch" ]; then - echo -e "%VERSION%\n$epoch:$pkgver-$pkgrel\n" - else - echo -e "%VERSION%\n$pkgver-$pkgrel\n" - fi - echo -e "%PATH%\n$dir\n" - - if [ -n "$pkgbase" ]; then - echo -e "%BASE%\n$pkgbase\n" - fi - - if [ -n "$arch" ]; then - echo "%ARCH%" - for i in "${arch[@]}"; do echo "$i"; done - echo "" - fi - if [ -n "$depends" ]; then - echo "%DEPENDS%" - for i in "${depends[@]}"; do - echo "$i" - done - echo "" - fi - if [ -n "$makedepends" ]; then - echo "%MAKEDEPENDS%" - for i in "${makedepends[@]}"; do - echo "$i" - done - echo "" - fi - if [ -n "$conflicts" ]; then - echo "%CONFLICTS%" - for i in "${conflicts[@]}"; do echo "$i"; done - echo "" - fi - if [ -n "$provides" ]; then - echo "%PROVIDES%" - for i in "${provides[@]}"; do echo "$i"; done - echo "" - fi -} - -source_pkgbuild() { - ret=0 - dir=$1 - pkgbuild=$dir/PKGBUILD - for var in "${variables[@]}"; do - unset "${var}" - done - source "$pkgbuild" &>/dev/null || ret=$? 
- - # ensure $pkgname and $pkgver variables were found - if [ $ret -ne 0 -o -z "$pkgname" -o -z "$pkgver" ]; then - echo -e "%INVALID%\n$pkgbuild\n" - return 1 - fi - - if [ "${#pkgname[@]}" -gt "1" ]; then - pkgbase=${pkgbase:-${pkgname[0]}} - for pkg in "${pkgname[@]}"; do - if [ "$(type -t "package_${pkg}")" != "function" ]; then - echo -e "%INVALID%\n$pkgbuild\n" - return 1 - else - backup_package_variables - pkgname=$pkg - while IFS= read -r line; do - var=${line%%=*} - var="${var#"${var%%[![:space:]]*}"}" # remove leading whitespace characters - for realvar in "${variables[@]}"; do - if [ "$var" == "$realvar" ]; then - eval $line - break - fi - done - done < <(type "package_${pkg}") - print_info - restore_package_variables - fi - done - else - echo - print_info - fi - - return 0 -} - -find_pkgbuilds() { - #Skip over some dirs - local d="${1##*/}" - if [ "$d" = "CVS" -o "$d" = ".svn" ]; then - return - fi - - if [ -f "$1/PKGBUILD" ]; then - source_pkgbuild "$1" - return - fi - empty=1 - for dir in "$1"/*; do - if [ -d "$dir" ]; then - find_pkgbuilds "$dir" - unset empty - fi - done - if [ -n "$empty" ]; then - echo -e "%MISSING%\n$1\n" - fi -} - -if [ -z "$1" -o -z "$2" ]; then - exit 1 -fi - -CARCH=$1 -shift -for dir in "$@"; do - find_pkgbuilds "$dir" -done - -exit 0 diff --git a/extra/lukeshu-xbs/cron-jobs/devlist-mailer b/extra/lukeshu-xbs/cron-jobs/devlist-mailer deleted file mode 100755 index 7f298b9..0000000 --- a/extra/lukeshu-xbs/cron-jobs/devlist-mailer +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -#Dummy helper to send email to arch-dev -# It does nothing if no output - -# Load $LIST and $FROM from the config file -. "$(dirname "$(readlink -e "$0")")/../config" - -SUBJECT="Repository Maintenance $(date +"%d-%m-%Y")" -if [ $# -ge 1 ]; then - SUBJECT="$1 $(date +"%d-%m-%Y")" -fi - -if [ $# -ge 2 ]; then - LIST="$2" -fi - -stdin="$(cat)" -#echo used to strip whitespace for checking for actual data -if [ -n "$(echo $stdin)" ]; then - - echo "Subject: $SUBJECT -To: $LIST -From: $FROM - -$stdin" | /usr/sbin/sendmail -F"$FROM" "$LIST" - -fi diff --git a/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup b/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup deleted file mode 100755 index 4063c09..0000000 --- a/extra/lukeshu-xbs/cron-jobs/ftpdir-cleanup +++ /dev/null @@ -1,96 +0,0 @@ -#!/bin/bash - -. "$(dirname "$(readlink -e "$0")")/../config" -. "$(dirname "$(readlink -e "$0")")/../db-functions" - -clean_pkg() { - local pkg - local target - - if ! "${CLEANUP_DRYRUN}"; then - for pkg in "$@"; do - if [ -h "$pkg" ]; then - rm -f "$pkg" "$pkg.sig" - else - mv_acl "$pkg" "$CLEANUP_DESTDIR/${pkg##*/}" - if [ -e "$pkg.sig" ]; then - mv_acl "$pkg.sig" "$CLEANUP_DESTDIR/${pkg##*/}.sig" - fi - touch "${CLEANUP_DESTDIR}/${pkg##*/}" - fi - done - fi -} - -script_lock - -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - repo_lock "${repo}" "${arch}" || exit 1 - done -done - -"${CLEANUP_DRYRUN}" && warning 'dry run mode is active' - -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - if [ ! 
-f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then - continue - fi - # get a list of actual available package files - find "${FTP_BASE}/${repo}/os/${arch}" -xtype f -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/repo-${repo}-${arch}" - # get a list of package files defined in the repo db - bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" | awk '/^%FILENAME%/{getline;print}' | sort > "${WORKDIR}/db-${repo}-${arch}" - - missing_pkgs=($(comm -13 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}")) - if [ ${#missing_pkgs[@]} -ge 1 ]; then - error "Missing packages in [%s] (%s)..." "${repo}" "${arch}" - for missing_pkg in "${missing_pkgs[@]}"; do - msg2 '%s' "${missing_pkg}" - done - fi - - old_pkgs=($(comm -23 "${WORKDIR}/repo-${repo}-${arch}" "${WORKDIR}/db-${repo}-${arch}")) - if [ ${#old_pkgs[@]} -ge 1 ]; then - msg "Removing old packages from [%s] (%s)..." "${repo}" "${arch}" - for old_pkg in "${old_pkgs[@]}"; do - msg2 '%s' "${old_pkg}" - clean_pkg "${FTP_BASE}/${repo}/os/${arch}/${old_pkg}" - done - fi - done -done - -# get a list of all available packages in the pacakge pool -find "$FTP_BASE/${PKGPOOL}" -name "*${PKGEXT}" -printf '%f\n' | sort > "${WORKDIR}/pool" -# create a list of packages in our db -find "${WORKDIR}" -maxdepth 1 -type f -name 'db-*' -exec cat {} \; | sort -u > "${WORKDIR}/db" - -old_pkgs=($(comm -23 "${WORKDIR}/pool" "${WORKDIR}/db")) -if [ ${#old_pkgs[@]} -ge 1 ]; then - msg "Removing old packages from package pool..." - for old_pkg in "${old_pkgs[@]}"; do - msg2 '%s' "${old_pkg}" - clean_pkg "$FTP_BASE/${PKGPOOL}/${old_pkg}" - done -fi - -old_pkgs=($(find "${CLEANUP_DESTDIR}" -type f -name "*${PKGEXT}" -mtime +"${CLEANUP_KEEP}" -printf '%f\n')) -if [ ${#old_pkgs[@]} -ge 1 ]; then - msg "Removing old packages from the cleanup directory..." - for old_pkg in "${old_pkgs[@]}"; do - msg2 '%s' "${old_pkg}" - if ! "${CLEANUP_DRYRUN}"; then - rm -f "${CLEANUP_DESTDIR}/${old_pkg}" - rm -f "${CLEANUP_DESTDIR}/${old_pkg}.sig" - fi - done -fi - -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - repo_unlock "${repo}" "${arch}" - done -done - -script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/integrity-check b/extra/lukeshu-xbs/cron-jobs/integrity-check deleted file mode 100755 index 7459380..0000000 --- a/extra/lukeshu-xbs/cron-jobs/integrity-check +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash - -dirname="$(dirname "$(readlink -e "$0")")" - -. "${dirname}/../config" -. 
"${dirname}/../db-functions" - -script_lock - -if [ $# -ne 1 ]; then - die "usage: %s " "${0##*/}" -fi -mailto=$1 - -check() { - "${dirname}"/check_archlinux/check_packages.py \ - --repos="${repos}" \ - --abs-tree="/srv/abs/rsync/${arch},/srv/abs/rsync/any" \ - --repo-dir="${FTP_BASE}" \ - --arch="${arch}" \ - 2>&1 | "${dirname}"/devlist-mailer "Integrity Check ${arch}: ${repos}" "${mailto}" -} - -repos='core,extra,community' -arch='i686' -check - -repos='core,extra,community,multilib' -arch='x86_64' -check - -script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/make_repo_torrents b/extra/lukeshu-xbs/cron-jobs/make_repo_torrents deleted file mode 100755 index 2eb0978..0000000 --- a/extra/lukeshu-xbs/cron-jobs/make_repo_torrents +++ /dev/null @@ -1,70 +0,0 @@ -#!/bin/bash -# Copyright (C) 2014 Joseph Graham -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -# This script finds any updated packages and calls -# `make_indivudual_torrent' for each of them. - -username=$( id -un ) - -case "${username}" in - repo | root ) - true - ;; - * ) - echo "This script must be run as repo user or root user." - echo "ByeBye!" - exit 1 - ;; -esac - -# pacman doesn't support multiple different packages of the same name, -# so it's OK to just stuff all the torrents into a single directory. -script_directory="$(dirname "$(readlink -e "$0")")/.." -. "$(dirname "$(readlink -e "$0")")/../config" -public_location="$FTP_BASE/" -torrent_location="$FTP_BASE/torrents/" - -cd "${torrent_location}" - -# Find any directories that might have packages in then -find "${public_location}" -name 'os' -type 'd' | -while read dir -do - # Find any packages - find "${dir}" -name '*\.pkg\.tar\.xz' | - while read pkg - do - pkg_name="${pkg##*/}" - - if [[ -h "${pkg}" ]] # check if it's a symbolic link - then - # We get the target of the symlink - pkg=$( readlink -f "${pkg}" ) - fi - - # If a .torrent file does not already exist for this package, we call - # `make_individual_torrent' to make it. - if ! [[ -f "${torrent_location}${pkg_name}.torrent" ]] - then - "$script_directory/make_individual_torrent" "${pkg}" "${public_location}" - fi - done -done - -if [[ "${username}" == root ]] -then - chown repo * -fi diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs b/extra/lukeshu-xbs/cron-jobs/sourceballs deleted file mode 100755 index c02912a..0000000 --- a/extra/lukeshu-xbs/cron-jobs/sourceballs +++ /dev/null @@ -1,150 +0,0 @@ -#!/bin/bash - -dirname="$(dirname "$(readlink -e "$0")")" -. "${dirname}/../config" -. 
"${dirname}/../db-functions" -pushd "${WORKDIR}" >/dev/null - -script_lock - -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - repo_lock "${repo}" "${arch}" || exit 1 - done -done - -#adjust the nice level to run at a lower priority -renice +10 -p $$ > /dev/null - -# Create a readable file for each repo with the following format -# - [ ] -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - # Repo does not exist; skip it - if [ ! -f "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" ]; then - continue - fi - bsdtar -xOf "${FTP_BASE}/${repo}/os/${arch}/${repo}${DBEXT}" \ - | awk '/^%NAME%/ { getline b }; - /^%BASE%/ { getline b }; - /^%VERSION%/ { getline v }; - /^%LICENSE%/,/^$/ { - if ( !/^%LICENSE%/ ) { l=l" "$0 } - }; - /^%ARCH%/ { - getline a; - printf "%s %s %s %s\n", b, v, a, l; - l=""; - }' - done | sort -u > "${WORKDIR}/db-${repo}" -done - -for repo in "${PKGREPOS[@]}"; do - for arch in "${ARCHES[@]}"; do - repo_unlock "${repo}" "${arch}" - done -done - -# Create a list of all available source package file names -find "${FTP_BASE}/${SRCPOOL}" -xtype f -name "*${SRCEXT}" -printf '%f\n' | sort -u > "${WORKDIR}/available-src-pkgs" - -# Check for all packages if we need to build a source package -for repo in "${PKGREPOS[@]}"; do - newpkgs=() - failedpkgs=() - while read line; do - pkginfo=("${line}") - pkgbase=${pkginfo[0]} - pkgver=${pkginfo[1]} - pkgarch=${pkginfo[2]} - pkglicense=("${pkginfo[@]:3}") - - # Should this package be skipped? - if grep -Fqx "${pkgbase}" "${dirname}/sourceballs.skip"; then - continue - fi - # Check if the license or .force file does not enforce creating a source package - if ! ([[ -z ${ALLOWED_LICENSES[*]} ]] || chk_license "${pkglicense[@]}" || grep -Fqx "${pkgbase}" "${dirname}/sourceballs.force"); then - continue - fi - # Store the expected file name of the source package - echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/expected-src-pkgs" - - # Build the source package if its not already there - if ! grep -Fqx "${pkgbase}-${pkgver}${SRCEXT}" "${WORKDIR}/available-src-pkgs"; then - # Check if we had failed before - if in_array "${pkgbase}-${pkgver}${SRCEXT}" "${failedpkgs[@]}"; then - continue - fi - - # Get the sources from xbs - mkdir -p -m0770 "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}" - cp -a "$(xbs releasepath "${pkgbase}" "${repo}" "${pkgarch}")" \ - "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null 2>&1 - if [ $? -ge 1 ]; then - failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") - continue - fi - - # Build the actual source package - pushd "${WORKDIR}/pkgbuilds/${repo}-${pkgarch}/${pkgbase}" >/dev/null - SRCPKGDEST=. makepkg --nocolor --allsource --ignorearch --skippgpcheck >"${WORKDIR}/${pkgbase}.log" 2>&1 - if [ $? -eq 0 ] && [ -f "${pkgbase}-${pkgver}${SRCEXT}" ]; then - mv_acl "${pkgbase}-${pkgver}${SRCEXT}" "${FTP_BASE}/${SRCPOOL}/${pkgbase}-${pkgver}${SRCEXT}" - # Avoid creating the same source package for every arch - echo "${pkgbase}-${pkgver}${SRCEXT}" >> "${WORKDIR}/available-src-pkgs" - newpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") - else - failedpkgs+=("${pkgbase}-${pkgver}${SRCEXT}") - cat "${WORKDIR}/${pkgbase}.log" >> "${WORKDIR}/makepkg-fail.log" - fi - popd >/dev/null - fi - done < "${WORKDIR}/db-${repo}" - - if [ ${#newpkgs[@]} -ge 1 ]; then - msg "Adding source packages for [%s]..." "${repo}" - for new_pkg in "${newpkgs[@]}"; do - msg2 '%s' "${new_pkg}" - done - fi - if [ ${#failedpkgs[@]} -ge 1 ]; then - msg "Failed to create source packages for [%s]..." 
"${repo}" - for failed_pkg in "${failedpkgs[@]}"; do - msg2 '%s' "${failed_pkg}" - done - fi -done - -# Cleanup old source packages -find "${WORKDIR}" -maxdepth 1 -type f -name 'expected-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/expected-src-pkgs.sort" -find "${WORKDIR}" -maxdepth 1 -type f -name 'available-src-pkgs' -exec cat {} \; | sort -u > "${WORKDIR}/available-src-pkgs.sort" -old_pkgs=($(comm -23 "${WORKDIR}/available-src-pkgs.sort" "${WORKDIR}/expected-src-pkgs.sort")) - -if [ ${#old_pkgs[@]} -ge 1 ]; then - msg "Removing old source packages..." - "${SOURCE_CLEANUP_DRYRUN}" && warning 'dry run mode is active' - for old_pkg in "${old_pkgs[@]}"; do - msg2 '%s' "${old_pkg}" - if ! "${SOURCE_CLEANUP_DRYRUN}"; then - mv_acl "$FTP_BASE/${SRCPOOL}/${old_pkg}" "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" - touch "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" - fi - done -fi - -old_pkgs=($(find "${SOURCE_CLEANUP_DESTDIR}" -type f -name "*${SRCEXT}" -mtime +"${SOURCE_CLEANUP_KEEP}" -printf '%f\n')) -if [ ${#old_pkgs[@]} -ge 1 ]; then - msg "Removing old source packages from the cleanup directory..." - for old_pkg in "${old_pkgs[@]}"; do - msg2 '%s' "${old_pkg}" - "${SOURCE_CLEANUP_DRYRUN}" || rm -f "${SOURCE_CLEANUP_DESTDIR}/${old_pkg}" - done -fi - -if [ -f "${WORKDIR}/makepkg-fail.log" ]; then - msg "Log of failed packages" - cat "${WORKDIR}/makepkg-fail.log" -fi - -script_unlock diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs.force b/extra/lukeshu-xbs/cron-jobs/sourceballs.force deleted file mode 100644 index badf15d..0000000 --- a/extra/lukeshu-xbs/cron-jobs/sourceballs.force +++ /dev/null @@ -1,4 +0,0 @@ -faad2 -wxgtk -wxpython -glhack diff --git a/extra/lukeshu-xbs/cron-jobs/sourceballs.skip b/extra/lukeshu-xbs/cron-jobs/sourceballs.skip deleted file mode 100644 index 0e1731c..0000000 --- a/extra/lukeshu-xbs/cron-jobs/sourceballs.skip +++ /dev/null @@ -1,43 +0,0 @@ -0ad-data -alienarena-data -blobwars-data -btanks-data -dangerdeep-data -egoboo-data -fillets-ng-data -flightgear-data -frogatto-data -gcompris-data -naev-data -openarena-data -rocksndiamonds-data -smc-data -speed-dreams-data -torcs-data -tremulous-data -ufoai-data -vdrift-data -warmux-data -wesnoth-data -widelands-data -xonotic-data -texlive-bibtexextra -texlive-bin -texlive-core -texlive-fontsextra -texlive-formatsextra -texlive-games -texlive-genericextra -texlive-htmlxml -texlive-humanities -texlive-langcjk -texlive-langcyrillic -texlive-langextra -texlive-langgreek -texlive-latexextra -texlive-music -texlive-pictures -texlive-plainextra -texlive-pstricks -texlive-publishers -texlive-science -- cgit v1.2.3-2-g168b