Diffstat (limited to 'devel')
-rw-r--r--  devel/admin.py                                      12
-rw-r--r--  devel/management/commands/generate_keyring.py       33
-rw-r--r--  devel/management/commands/reporead.py              170
-rw-r--r--  devel/migrations/0001_initial.py                    18
-rw-r--r--  devel/migrations/0002_auto__add_masterkey.py        76
-rw-r--r--  devel/migrations/__init__.py                         0
-rw-r--r--  devel/models.py                                     20
7 files changed, 253 insertions, 76 deletions
diff --git a/devel/admin.py b/devel/admin.py
new file mode 100644
index 00000000..84082fb8
--- /dev/null
+++ b/devel/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+
+from .models import MasterKey
+
+
+class MasterKeyAdmin(admin.ModelAdmin):
+    list_display = ('pgp_key', 'owner', 'created', 'revoker', 'revoked')
+    search_fields = ('pgp_key', 'owner', 'revoker')
+
+admin.site.register(MasterKey, MasterKeyAdmin)
+
+# vim: set ts=4 sw=4 et:
diff --git a/devel/management/commands/generate_keyring.py b/devel/management/commands/generate_keyring.py
index 35ab8874..062c738b 100644
--- a/devel/management/commands/generate_keyring.py
+++ b/devel/management/commands/generate_keyring.py
@@ -13,6 +13,7 @@ import logging
import subprocess
import sys
+from devel.models import MasterKey
from main.models import UserProfile
logging.basicConfig(
@@ -23,7 +24,7 @@ logging.basicConfig(
logger = logging.getLogger()
class Command(BaseCommand):
- args = "<keyserver> <keyring_path>"
+ args = "<keyserver> <keyring_path> [ownertrust_path]"
help = "Assemble a GPG keyring with all known developer keys."
def handle(self, *args, **options):
@@ -35,10 +36,14 @@ class Command(BaseCommand):
elif v == 2:
logger.level = logging.DEBUG
- if len(args) != 2:
+ if len(args) < 2:
raise CommandError("keyserver and keyring_path must be provided")
- return generate_keyring(args[0], args[1])
+ generate_keyring(args[0], args[1])
+
+ if len(args) > 2:
+ generate_ownertrust(args[2])
+
def generate_keyring(keyserver, keyring):
logger.info("getting all known key IDs")
@@ -48,12 +53,34 @@ def generate_keyring(keyserver, keyring):
pgp_key__isnull=False).extra(where=["pgp_key != ''"]).values_list(
"pgp_key", flat=True)
logger.info("%d keys fetched from user profiles", len(key_ids))
+ master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+ logger.info("%d keys fetched from master keys", len(master_key_ids))
gpg_cmd = ["gpg", "--no-default-keyring", "--keyring", keyring,
"--keyserver", keyserver, "--recv-keys"]
logger.info("running command: %r", gpg_cmd)
gpg_cmd.extend(key_ids)
+ gpg_cmd.extend(master_key_ids)
subprocess.check_call(gpg_cmd)
logger.info("keyring at %s successfully updated", keyring)
+
+TRUST_LEVELS = {
+    'unknown': 0,
+    'expired': 1,
+    'undefined': 2,
+    'never': 3,
+    'marginal': 4,
+    'fully': 5,
+    'ultimate': 6,
+}
+
+
+def generate_ownertrust(trust_path):
+    master_key_ids = MasterKey.objects.values_list("pgp_key", flat=True)
+    with open(trust_path, "w") as trustfile:
+        for key_id in master_key_ids:
+            trustfile.write("%s:%d:\n" % (key_id, TRUST_LEVELS['marginal']))
+    logger.info("trust file at %s created or overwritten", trust_path)
+
# vim: set ts=4 sw=4 et:
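
The TRUST_LEVELS values above mirror GnuPG's own ownertrust scale (0 = unknown through 6 = ultimate), so the file written by generate_ownertrust() uses the same FINGERPRINT:LEVEL: lines that `gpg --export-ownertrust` produces. A minimal consumption sketch, assuming a hypothetical GnuPG home directory for the generated keyring (neither path below is part of this commit):

import subprocess

# Assumed locations; point these at wherever the management command wrote
# the keyring and the ownertrust file.
gnupg_home = "/srv/pkg-keyring"
trust_path = "/srv/pkg-keyring/ownertrust.txt"

# gpg reads the FINGERPRINT:LEVEL: lines and records marginal trust for each
# master key in its trust database.
subprocess.check_call(
        ["gpg", "--homedir", gnupg_home, "--import-ownertrust", trust_path])
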
diff --git a/devel/management/commands/reporead.py b/devel/management/commands/reporead.py
index cf101d97..c444538b 100644
--- a/devel/management/commands/reporead.py
+++ b/devel/management/commands/reporead.py
@@ -282,40 +282,20 @@ def select_pkg_for_update(dbpkg):
return list(new_pkg)[0]
-def db_update(archname, reponame, pkgs, options):
- """
- Parses a list and updates the Arch dev database accordingly.
-
- Arguments:
- pkgs -- A list of Pkg objects.
-
- """
- logger.info('Updating Arch: %s', archname)
- force = options.get('force', False)
- filesonly = options.get('filesonly', False)
-
+def update_common(archname, reponame, pkgs, sanity_check=True):
with transaction.commit_manually():
repository = Repo.objects.get(name__iexact=reponame)
architecture = Arch.objects.get(name__iexact=archname)
# no-arg order_by() removes even the default ordering; we don't need it
dbpkgs = Package.objects.filter(
arch=architecture, repo=repository).order_by()
- # This makes our inner loop where we find packages by name *way* more
- # efficient by not having to go to the database for each package to
- # SELECT them by name.
- dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
-
- logger.debug("Creating sets")
- dbset = set(dbdict.keys())
- syncset = set([pkg.name for pkg in pkgs])
- logger.info("%d packages in current web DB", len(dbset))
- logger.info("%d packages in new updating db", len(syncset))
- in_sync_not_db = syncset - dbset
- logger.info("%d packages in sync not db", len(in_sync_not_db))
+
+ logger.info("%d packages in current web DB", len(dbpkgs))
+ logger.info("%d packages in new updating DB", len(pkgs))
# Try to catch those random package deletions that make Eric so unhappy.
- if len(dbset):
- dbpercent = 100.0 * len(syncset) / len(dbset)
+ if len(dbpkgs):
+ dbpercent = 100.0 * len(pkgs) / len(dbpkgs)
else:
dbpercent = 0.0
logger.info("DB package ratio: %.1f%%", dbpercent)
@@ -324,11 +304,13 @@ def db_update(archname, reponame, pkgs, options):
# means we expect the repo to fluctuate a lot.
msg = "Package database has %.1f%% the number of packages in the " \
"web database" % dbpercent
- if len(dbset) == 0 and len(syncset) == 0:
+ if not sanity_check:
+ pass
+ elif repository.testing or repository.staging:
pass
- elif not filesonly and \
- len(dbset) > 20 and dbpercent < 50.0 and \
- not repository.testing and not repository.staging:
+ elif len(dbpkgs) == 0 and len(pkgs) == 0:
+ pass
+ elif len(dbpkgs) > 20 and dbpercent < 50.0:
logger.error(msg)
raise Exception(msg)
elif dbpercent < 75.0:
@@ -339,27 +321,47 @@ def db_update(archname, reponame, pkgs, options):
# to guard against simultaneous updates
transaction.commit()
- if not filesonly:
- # packages in syncdb and not in database (add to database)
- for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db):
- logger.info("Adding package %s", pkg.name)
- dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository)
- try:
- with transaction.commit_on_success():
- populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow())
- except IntegrityError:
- logger.warning("Could not add package %s; "
- "not fatal if another thread beat us to it.",
- pkg.name, exc_info=True)
-
- # packages in database and not in syncdb (remove from database)
- for pkgname in (dbset - syncset):
- logger.info("Removing package %s", pkgname)
- dbpkg = dbdict[pkgname]
+ return dbpkgs
+
+def db_update(archname, reponame, pkgs, force=False):
+ """
+ Parses a list of packages and updates the packages database accordingly.
+ """
+ logger.info('Updating %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, True)
+ repository = Repo.objects.get(name__iexact=reponame)
+ architecture = Arch.objects.get(name__iexact=archname)
+
+ # This makes our inner loop where we find packages by name *way* more
+ # efficient by not having to go to the database for each package to
+ # SELECT them by name.
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
+
+ dbset = set(dbdict.keys())
+ syncset = set([pkg.name for pkg in pkgs])
+
+ in_sync_not_db = syncset - dbset
+ logger.info("%d packages in sync not db", len(in_sync_not_db))
+ # packages in syncdb and not in database (add to database)
+ for pkg in (pkg for pkg in pkgs if pkg.name in in_sync_not_db):
+ logger.info("Adding package %s", pkg.name)
+ dbpkg = Package(pkgname=pkg.name, arch=architecture, repo=repository)
+ try:
with transaction.commit_on_success():
- # no race condition here as long as simultaneous threads both
- # issue deletes; second delete will be a no-op
- dbpkg.delete()
+ populate_pkg(dbpkg, pkg, timestamp=datetime.utcnow())
+ except IntegrityError:
+ logger.warning("Could not add package %s; "
+ "not fatal if another thread beat us to it.",
+ pkg.name, exc_info=True)
+
+ # packages in database and not in syncdb (remove from database)
+ for pkgname in (dbset - syncset):
+ logger.info("Removing package %s", pkgname)
+ dbpkg = dbdict[pkgname]
+ with transaction.commit_on_success():
+ # no race condition here as long as simultaneous threads both
+ # issue deletes; second delete will be a no-op
+ dbpkg.delete()
# packages in both database and in syncdb (update in database)
pkg_in_both = syncset & dbset
@@ -369,9 +371,7 @@ def db_update(archname, reponame, pkgs, options):
timestamp = None
# for a force, we don't want to update the timestamp.
# for a non-force, we don't want to do anything at all.
- if filesonly:
- pass
- elif pkg_same_version(pkg, dbpkg):
+ if pkg_same_version(pkg, dbpkg):
if not force:
continue
else:
@@ -380,26 +380,45 @@ def db_update(archname, reponame, pkgs, options):
# The odd select_for_update song and dance here are to ensure
# simultaneous updates don't happen on a package, causing
# files/depends/all related items to be double-imported.
- if filesonly:
- with transaction.commit_on_success():
- # TODO Django 1.4 select_for_update() will work once released
- dbpkg = select_pkg_for_update(dbpkg)
- if pkg_same_version(pkg, dbpkg):
- logger.debug("Package %s was already updated", pkg.name)
- continue
- logger.debug("Checking files for package %s", pkg.name)
- populate_files(dbpkg, pkg, force=force)
- else:
- with transaction.commit_on_success():
- # TODO Django 1.4 select_for_update() will work once released
- dbpkg = select_pkg_for_update(dbpkg)
- if pkg_same_version(pkg, dbpkg):
- logger.debug("Package %s was already updated", pkg.name)
- continue
- logger.info("Updating package %s", pkg.name)
- populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp)
+ with transaction.commit_on_success():
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ if pkg_same_version(pkg, dbpkg):
+ logger.debug("Package %s was already updated", pkg.name)
+ continue
+ logger.info("Updating package %s", pkg.name)
+ populate_pkg(dbpkg, pkg, force=force, timestamp=timestamp)
+
+ logger.info('Finished updating arch: %s', archname)
+
+
+def filesonly_update(archname, reponame, pkgs, force=False):
+ """
+ Parses a list of packages and updates their stored file lists accordingly.
+ """
+ logger.info('Updating files for %s (%s)', reponame, archname)
+ dbpkgs = update_common(archname, reponame, pkgs, False)
+ dbdict = dict((dbpkg.pkgname, dbpkg) for dbpkg in dbpkgs)
+ dbset = set(dbdict.keys())
+
+ for pkg in (pkg for pkg in pkgs if pkg.name in dbset):
+ dbpkg = dbdict[pkg.name]
+
+ # The odd select_for_update song and dance here are to ensure
+ # simultaneous updates don't happen on a package, causing
+ # files to be double-imported.
+ with transaction.commit_on_success():
+ if not dbpkg.files_last_update or not dbpkg.last_update:
+ pass
+ elif dbpkg.files_last_update > dbpkg.last_update:
+ logger.debug("Files for %s are up to date", pkg.name)
+ continue
+ # TODO Django 1.4 select_for_update() will work once released
+ dbpkg = select_pkg_for_update(dbpkg)
+ logger.debug("Checking files for package %s", pkg.name)
+ populate_files(dbpkg, pkg, force=force)
- logger.info('Finished updating Arch: %s', archname)
+ logger.info('Finished updating arch: %s', archname)
def parse_info(iofile):
@@ -488,6 +507,8 @@ def read_repo(primary_arch, repo_file, options):
"""
# always returns an Arch object, regardless of what is passed in
primary_arch = locate_arch(primary_arch)
+ force = options.get('force', False)
+ filesonly = options.get('filesonly', False)
repo, packages = parse_repo(repo_file)
@@ -507,7 +528,10 @@ def read_repo(primary_arch, repo_file, options):
logger.info('Starting database updates for %s.', repo_file)
for arch in sorted(packages_arches.keys()):
- db_update(arch, repo, packages_arches[arch], options)
+ if filesonly:
+ filesonly_update(arch, repo, packages_arches[arch], force)
+ else:
+ db_update(arch, repo, packages_arches[arch], force)
logger.info('Finished database updates for %s.', repo_file)
return 0
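
With the refactor above, both paths share update_common(): db_update() keeps the package-count sanity check, while filesonly_update() passes sanity_check=False and only re-reads files for packages whose files data is missing or older than the last package update. A hedged invocation sketch — the 'force' and 'filesonly' options are read via options.get() here but defined elsewhere in the command, and the architecture and paths below are assumptions:

from django.core.management import call_command

# Regular package-database pass for one architecture.
call_command('reporead', 'x86_64', '/srv/ftp/core/os/x86_64/core.db.tar.gz')

# Files-only pass over the matching .files database; packages whose file
# lists are already newer than their last update are skipped.
call_command('reporead', 'x86_64', '/srv/ftp/core/os/x86_64/core.files.tar.gz',
        filesonly=True)
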
diff --git a/devel/migrations/0001_initial.py b/devel/migrations/0001_initial.py
new file mode 100644
index 00000000..c28fc20f
--- /dev/null
+++ b/devel/migrations/0001_initial.py
@@ -0,0 +1,18 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+    def forwards(self, orm):
+        pass
+
+
+    def backwards(self, orm):
+        pass
+
+
+    models = {}
+
+    complete_apps = ['devel']
diff --git a/devel/migrations/0002_auto__add_masterkey.py b/devel/migrations/0002_auto__add_masterkey.py
new file mode 100644
index 00000000..ac1f745a
--- /dev/null
+++ b/devel/migrations/0002_auto__add_masterkey.py
@@ -0,0 +1,76 @@
+# encoding: utf-8
+from south.db import db
+from south.v2 import SchemaMigration
+from django.db import models
+
+class Migration(SchemaMigration):
+
+ depends_on = (
+ ("main", "0051_auto__chg_field_userprofile_pgp_key"),
+ )
+
+ def forwards(self, orm):
+ # Adding model 'MasterKey'
+ db.create_table('devel_masterkey', (
+ ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
+ ('owner', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_owner', to=orm['auth.User'])),
+ ('revoker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='masterkey_revoker', to=orm['auth.User'])),
+ ('pgp_key', self.gf('main.fields.PGPKeyField')(max_length=40)),
+ ('created', self.gf('django.db.models.fields.DateTimeField')()),
+ ('revoked', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
+ ))
+ db.send_create_signal('devel', ['MasterKey'])
+
+ def backwards(self, orm):
+ db.delete_table('devel_masterkey')
+
+
+ models = {
+ 'auth.group': {
+ 'Meta': {'object_name': 'Group'},
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
+ 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
+ },
+ 'auth.permission': {
+ 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
+ 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
+ },
+ 'auth.user': {
+ 'Meta': {'object_name': 'User'},
+ 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
+ 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
+ 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
+ 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
+ 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
+ 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
+ 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
+ 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
+ },
+ 'contenttypes.contenttype': {
+ 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
+ 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
+ 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
+ },
+ 'devel.masterkey': {
+ 'Meta': {'object_name': 'MasterKey'},
+ 'created': ('django.db.models.fields.DateTimeField', [], {}),
+ 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
+ 'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_owner'", 'to': "orm['auth.User']"}),
+ 'pgp_key': ('main.fields.PGPKeyField', [], {'max_length': '40'}),
+ 'revoked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
+ 'revoker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'masterkey_revoker'", 'to': "orm['auth.User']"})
+ }
+ }
+
+ complete_apps = ['devel']
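
Both files are South schema migrations (this predates Django's built-in migration framework): 0001_initial is an empty placeholder that gives the devel app a migration history, and 0002 creates the devel_masterkey table. A sketch of applying them, assuming South is installed and listed in INSTALLED_APPS:

from django.core.management import call_command

# Equivalent to running `./manage.py migrate devel`; South applies
# 0001_initial and then 0002_auto__add_masterkey in order.
call_command('migrate', 'devel')
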
diff --git a/devel/migrations/__init__.py b/devel/migrations/__init__.py
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/devel/migrations/__init__.py
diff --git a/devel/models.py b/devel/models.py
index e69de29b..f31b8fbb 100644
--- a/devel/models.py
+++ b/devel/models.py
@@ -0,0 +1,20 @@
+from django.db import models
+from django.contrib.auth.models import User
+
+from main.fields import PGPKeyField
+
+
+class MasterKey(models.Model):
+    owner = models.ForeignKey(User, related_name='masterkey_owner',
+            help_text="The developer holding this master key")
+    revoker = models.ForeignKey(User, related_name='masterkey_revoker',
+            help_text="The developer holding the revocation certificate")
+    pgp_key = PGPKeyField(max_length=40, verbose_name="PGP key fingerprint",
+            help_text="consists of 40 hex digits; use `gpg --fingerprint`")
+    created = models.DateTimeField()
+    revoked = models.DateTimeField(null=True, blank=True)
+
+    class Meta:
+        ordering = ('created',)
+
+# vim: set ts=4 sw=4 et:
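
A MasterKey row ties together the key's owner, the developer holding its revocation certificate, and the 40-hex-digit fingerprint. A minimal creation sketch, e.g. from `./manage.py shell` (the usernames and fingerprint are placeholders, not real data):

from datetime import datetime

from django.contrib.auth.models import User
from devel.models import MasterKey

owner = User.objects.get(username='key-owner')      # placeholder username
revoker = User.objects.get(username='key-revoker')  # placeholder username

MasterKey.objects.create(
        owner=owner,
        revoker=revoker,
        # placeholder 40-hex-digit fingerprint
        pgp_key='0123456789ABCDEF0123456789ABCDEF01234567',
        created=datetime.utcnow(),
)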