author     Dan McGee <dan@archlinux.org>    2014-02-22 20:38:59 +0100
committer  Dan McGee <dan@archlinux.org>    2014-02-22 20:57:02 +0100
commit     7947d36c4e509a917941a34c576fde3a207a4439 (patch)
tree       52c9d71e03e758d27c8325c488a3c3603207484b
parent     16b22e4bfc2e271a58a79a9fb4ccb5e059c6d62a (diff)
download   archweb-7947d36c4e509a917941a34c576fde3a207a4439.tar.gz
           archweb-7947d36c4e509a917941a34c576fde3a207a4439.tar.xz
Break out developer reports into a separate module

This code was getting quite unwieldy, and wasn't very modular. Introduce a
DeveloperReport class that contains the content for a single report, and
utilize it to create our various report metadata and package filtering
operations. Utilize these report objects in the reports view, vastly
simplifying it.

We don't yet dynamically generate the list of reports on the developer index
page; that will be coming soon.

Signed-off-by: Dan McGee <dan@archlinux.org>
-rw-r--r--   devel/reports.py   176
-rw-r--r--   devel/views.py     137
2 files changed, 191 insertions, 122 deletions
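
For readers who want the shape of the refactor without wading through the diff: each report becomes a small object bundling its slug, title, description, optional column metadata, and a callable that filters a package queryset, and the view reduces to a dictionary lookup by slug. Below is a minimal, self-contained sketch of that pattern only; the Package namedtuple and the sample data are stand-ins invented for illustration and are not part of archweb.

# A minimal sketch of the pattern this commit introduces, condensed from the
# diff below. Package and the sample data are illustration-only stand-ins,
# not archweb models.
from collections import namedtuple
from datetime import datetime, timedelta


class DeveloperReport(object):
    """Bundle a report's slug, title, description, column metadata and the
    callable that narrows a package collection down to the report."""
    def __init__(self, slug, name, desc, packages_func, names=None, attrs=None):
        self.slug = slug
        self.name = name
        self.description = desc
        self.packages = packages_func
        self.names = names
        self.attrs = attrs


Package = namedtuple('Package', ['pkgname', 'build_date'])


def old(packages, username):
    # report body: keep only packages built more than two years ago
    cutoff = datetime.utcnow() - timedelta(days=365 * 2)
    return [pkg for pkg in packages if pkg.build_date < cutoff]


REPORT_OLD = DeveloperReport('old', 'Old',
        'Packages last built more than two years ago', old)

# The view side becomes a lookup by slug instead of a long if/elif chain.
available = {report.slug: report for report in (REPORT_OLD,)}
report = available['old']
sample = [Package('ancient-pkg', datetime.utcnow() - timedelta(days=3 * 365)),
          Package('fresh-pkg', datetime.utcnow() - timedelta(days=30))]
print([pkg.pkgname for pkg in report.packages(sample, username=None)])
# prints: ['ancient-pkg']
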
diff --git a/devel/reports.py b/devel/reports.py
new file mode 100644
index 0000000..f1ffab3
--- /dev/null
+++ b/devel/reports.py
@@ -0,0 +1,176 @@
+from datetime import timedelta
+import pytz
+
+from django.db.models import F
+from django.template.defaultfilters import filesizeformat
+from django.utils.timezone import now
+
+from .models import DeveloperKey, UserProfile
+from main.models import PackageFile
+from packages.models import PackageRelation, Depend
+
+class DeveloperReport(object):
+    def __init__(self, slug, name, desc, packages_func,
+            names=None, attrs=None):
+        self.slug = slug
+        self.name = name
+        self.description = desc
+        self.packages = packages_func
+        self.names = names
+        self.attrs = attrs
+
+
+def old(packages, username):
+    cutoff = now() - timedelta(days=365 * 2)
+    return packages.filter(
+            build_date__lt=cutoff).order_by('build_date')
+
+
+def outofdate(packages, username):
+    cutoff = now() - timedelta(days=30)
+    return packages.filter(
+            flag_date__lt=cutoff).order_by('flag_date')
+
+
+def big(packages, username):
+    cutoff = 50 * 1024 * 1024
+    packages = packages.filter(
+            compressed_size__gte=cutoff).order_by('-compressed_size')
+    # Format the compressed and installed sizes with MB/GB/etc suffixes
+    for package in packages:
+        package.compressed_size_pretty = filesizeformat(
+                package.compressed_size)
+        package.installed_size_pretty = filesizeformat(
+                package.installed_size)
+    return packages
+
+
+def badcompression(packages, username):
+    cutoff = 0.90 * F('installed_size')
+    packages = packages.filter(compressed_size__gt=0, installed_size__gt=0,
+            compressed_size__gte=cutoff).order_by('-compressed_size')
+
+    # Format the compressed and installed sizes with MB/GB/etc suffixes
+    for package in packages:
+        package.compressed_size_pretty = filesizeformat(
+                package.compressed_size)
+        package.installed_size_pretty = filesizeformat(
+                package.installed_size)
+        ratio = package.compressed_size / float(package.installed_size)
+        package.ratio = '%.3f' % ratio
+        package.compress_type = package.filename.split('.')[-1]
+
+    return packages
+
+
+def uncompressed_man(packages, username):
+    # checking for all '.0'...'.9' + '.n' extensions
+    bad_files = PackageFile.objects.filter(is_directory=False,
+            directory__contains='/man/',
+            filename__regex=r'\.[0-9n]').exclude(
+            filename__endswith='.gz').exclude(
+            filename__endswith='.xz').exclude(
+            filename__endswith='.bz2').exclude(
+            filename__endswith='.html')
+    if username:
+        pkg_ids = set(packages.values_list('id', flat=True))
+        bad_files = bad_files.filter(pkg__in=pkg_ids)
+    bad_files = bad_files.values_list(
+            'pkg_id', flat=True).order_by().distinct()
+    return packages.filter(id__in=set(bad_files))
+
+
+def uncompressed_info(packages, username):
+    # we don't worry about looking for '*.info-1', etc., given that an
+    # uncompressed root page probably exists in the package anyway
+    bad_files = PackageFile.objects.filter(is_directory=False,
+            directory__endswith='/info/', filename__endswith='.info')
+    if username:
+        pkg_ids = set(packages.values_list('id', flat=True))
+        bad_files = bad_files.filter(pkg__in=pkg_ids)
+    bad_files = bad_files.values_list(
+            'pkg_id', flat=True).order_by().distinct()
+    return packages.filter(id__in=set(bad_files))
+
+
+def unneeded_orphans(packages, username):
+    owned = PackageRelation.objects.all().values('pkgbase')
+    required = Depend.objects.all().values('name')
+    # The two separate calls to exclude are required to do the right thing
+    return packages.exclude(pkgbase__in=owned).exclude(
+            pkgname__in=required)
+
+
+def mismatched_signature(packages, username):
+    cutoff = timedelta(hours=24)
+    filtered = []
+    packages = packages.select_related(
+            'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
+    known_keys = DeveloperKey.objects.select_related(
+            'owner').filter(owner__isnull=False)
+    known_keys = {dk.key: dk for dk in known_keys}
+    for package in packages:
+        bad = False
+        sig = package.signature
+        sig_date = sig.creation_time.replace(tzinfo=pytz.utc)
+        package.sig_date = sig_date.date()
+        dev_key = known_keys.get(sig.key_id, None)
+        if dev_key:
+            package.sig_by = dev_key.owner
+            if dev_key.owner_id != package.packager_id:
+                bad = True
+        else:
+            package.sig_by = sig.key_id
+            bad = True
+
+        if sig_date > package.build_date + cutoff:
+            bad = True
+
+        if bad:
+            filtered.append(package)
+    return filtered
+
+
+REPORT_OLD = DeveloperReport('old', 'Old',
+        'Packages last built more than two years ago', old)
+
+REPORT_OUTOFDATE = DeveloperReport('long-out-of-date', 'Long Out-of-date',
+        'Packages marked out-of-date more than 30 days ago', outofdate)
+
+REPORT_BIG = DeveloperReport('big', 'Big',
+        'Packages with compressed size > 50 MiB', big,
+        ['Compressed Size', 'Installed Size'],
+        ['compressed_size_pretty', 'installed_size_pretty'])
+
+REPORT_BADCOMPRESS = DeveloperReport('badcompression', 'Bad Compression',
+        'Packages that have little need for compression', badcompression,
+        ['Compressed Size', 'Installed Size', 'Ratio', 'Type'],
+        ['compressed_size_pretty', 'installed_size_pretty', 'ratio', 'compress_type'])
+
+
+REPORT_MAN = DeveloperReport('uncompressed-man', 'Uncompressed Manpages',
+        'Packages with uncompressed manpages', uncompressed_man)
+
+REPORT_INFO = DeveloperReport('uncompressed-info', 'Uncompressed Info Pages',
+        'Packages with uncompressed infopages', uncompressed_info)
+
+REPORT_ORPHANS = DeveloperReport('unneeded-orphans', 'Unneeded Orphans',
+        'Orphan packages required by no other packages', unneeded_orphans)
+
+REPORT_SIGNATURE = DeveloperReport('mismatched-signature', 'Mismatched Signatures',
+        'Packages with mismatched signatures', mismatched_signature,
+        ['Signature Date', 'Signed By', 'Packager'],
+        ['sig_date', 'sig_by', 'packager'])
+
+
+def available_reports():
+    return (
+        REPORT_OLD,
+        REPORT_OUTOFDATE,
+        REPORT_BIG,
+        REPORT_BADCOMPRESS,
+        REPORT_MAN,
+        REPORT_INFO,
+        REPORT_ORPHANS,
+        REPORT_SIGNATURE,
+    )
diff --git a/devel/views.py b/devel/views.py
index 1e20a43..cd2e25f 100644
--- a/devel/views.py
+++ b/devel/views.py
@@ -1,6 +1,5 @@
from datetime import timedelta
import operator
-import pytz
import time

from django.http import HttpResponseRedirect
@@ -10,21 +9,21 @@ from django.contrib.admin.models import LogEntry, ADDITION
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
-from django.db.models import F, Count, Max
+from django.db.models import Count, Max
from django.http import Http404
from django.shortcuts import get_object_or_404, render
-from django.template.defaultfilters import filesizeformat
from django.views.decorators.cache import never_cache
from django.utils.encoding import force_unicode
from django.utils.http import http_date
from django.utils.timezone import now

from .forms import ProfileForm, UserProfileForm, NewUserForm
-from .models import DeveloperKey, UserProfile
-from main.models import Package, PackageFile
+from .models import UserProfile
+from .reports import available_reports
+from main.models import Package
from main.models import Arch, Repo
from news.models import News
-from packages.models import PackageRelation, Signoff, FlagRequest, Depend
+from packages.models import PackageRelation, Signoff, FlagRequest
from packages.utils import get_signoff_groups
from todolists.models import TodolistPackage
from todolists.utils import get_annotated_todolists
@@ -180,10 +179,13 @@ def change_profile(request):

@login_required
def report(request, report_name, username=None):
-    title = 'Developer Report'
-    packages = Package.objects.normal()
-    names = attrs = user = None
+    available = {report.slug: report for report in available_reports()}
+    report = available.get(report_name, None)
+    if report is None:
+        raise Http404
+    packages = Package.objects.normal()
+    user = None

    if username:
        user = get_object_or_404(User, username=username, is_active=True)
        maintained = PackageRelation.objects.filter(user=user,
@@ -193,126 +195,17 @@ def report(request, report_name, username=None):
    maints = User.objects.filter(id__in=PackageRelation.objects.filter(
        type=PackageRelation.MAINTAINER).values('user'))

-    if report_name == 'old':
-        title = 'Packages last built more than two years ago'
-        cutoff = now() - timedelta(days=365 * 2)
-        packages = packages.filter(
-                build_date__lt=cutoff).order_by('build_date')
-    elif report_name == 'long-out-of-date':
-        title = 'Packages marked out-of-date more than 30 days ago'
-        cutoff = now() - timedelta(days=30)
-        packages = packages.filter(
-                flag_date__lt=cutoff).order_by('flag_date')
-    elif report_name == 'big':
-        title = 'Packages with compressed size > 50 MiB'
-        cutoff = 50 * 1024 * 1024
-        packages = packages.filter(
-                compressed_size__gte=cutoff).order_by('-compressed_size')
-        names = [ 'Compressed Size', 'Installed Size' ]
-        attrs = [ 'compressed_size_pretty', 'installed_size_pretty' ]
-        # Format the compressed and installed sizes with MB/GB/etc suffixes
-        for package in packages:
-            package.compressed_size_pretty = filesizeformat(
-                    package.compressed_size)
-            package.installed_size_pretty = filesizeformat(
-                    package.installed_size)
-    elif report_name == 'badcompression':
-        title = 'Packages that have little need for compression'
-        cutoff = 0.90 * F('installed_size')
-        packages = packages.filter(compressed_size__gt=0, installed_size__gt=0,
-                compressed_size__gte=cutoff).order_by('-compressed_size')
-        names = [ 'Compressed Size', 'Installed Size', 'Ratio', 'Type' ]
-        attrs = [ 'compressed_size_pretty', 'installed_size_pretty',
-                'ratio', 'compress_type' ]
-        # Format the compressed and installed sizes with MB/GB/etc suffixes
-        for package in packages:
-            package.compressed_size_pretty = filesizeformat(
-                    package.compressed_size)
-            package.installed_size_pretty = filesizeformat(
-                    package.installed_size)
-            ratio = package.compressed_size / float(package.installed_size)
-            package.ratio = '%.3f' % ratio
-            package.compress_type = package.filename.split('.')[-1]
-    elif report_name == 'uncompressed-man':
-        title = 'Packages with uncompressed manpages'
-        # checking for all '.0'...'.9' + '.n' extensions
-        bad_files = PackageFile.objects.filter(is_directory=False,
-                directory__contains='/man/',
-                filename__regex=r'\.[0-9n]').exclude(
-                filename__endswith='.gz').exclude(
-                filename__endswith='.xz').exclude(
-                filename__endswith='.bz2').exclude(
-                filename__endswith='.html')
-        if username:
-            pkg_ids = set(packages.values_list('id', flat=True))
-            bad_files = bad_files.filter(pkg__in=pkg_ids)
-        bad_files = bad_files.values_list(
-                'pkg_id', flat=True).order_by().distinct()
-        packages = packages.filter(id__in=set(bad_files))
-    elif report_name == 'uncompressed-info':
-        title = 'Packages with uncompressed infopages'
-        # we don't worry about looking for '*.info-1', etc., given that an
-        # uncompressed root page probably exists in the package anyway
-        bad_files = PackageFile.objects.filter(is_directory=False,
-                directory__endswith='/info/', filename__endswith='.info')
-        if username:
-            pkg_ids = set(packages.values_list('id', flat=True))
-            bad_files = bad_files.filter(pkg__in=pkg_ids)
-        bad_files = bad_files.values_list(
-                'pkg_id', flat=True).order_by().distinct()
-        packages = packages.filter(id__in=set(bad_files))
-    elif report_name == 'unneeded-orphans':
-        title = 'Orphan packages required by no other packages'
-        owned = PackageRelation.objects.all().values('pkgbase')
-        required = Depend.objects.all().values('name')
-        # The two separate calls to exclude is required to do the right thing
-        packages = packages.exclude(pkgbase__in=owned).exclude(
-                pkgname__in=required)
-    elif report_name == 'mismatched-signature':
-        title = 'Packages with mismatched signatures'
-        names = [ 'Signature Date', 'Signed By', 'Packager' ]
-        attrs = [ 'sig_date', 'sig_by', 'packager' ]
-        cutoff = timedelta(hours=24)
-        filtered = []
-        packages = packages.select_related(
-                'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
-        known_keys = DeveloperKey.objects.select_related(
-                'owner').filter(owner__isnull=False)
-        known_keys = {dk.key: dk for dk in known_keys}
-        for package in packages:
-            bad = False
-            sig = package.signature
-            sig_date = sig.creation_time.replace(tzinfo=pytz.utc)
-            package.sig_date = sig_date.date()
-            dev_key = known_keys.get(sig.key_id, None)
-            if dev_key:
-                package.sig_by = dev_key.owner
-                if dev_key.owner_id != package.packager_id:
-                    bad = True
-            else:
-                package.sig_by = sig.key_id
-                bad = True
-
-            if sig_date > package.build_date + cutoff:
-                bad = True
-
-            if bad:
-                filtered.append(package)
-        packages = filtered
-    else:
-        raise Http404
-
    arches = {pkg.arch for pkg in packages}
    repos = {pkg.repo for pkg in packages}
    context = {
        'all_maintainers': maints,
-        'title': title,
+        'title': report.description,
        'maintainer': user,
-        'packages': packages,
+        'packages': report.packages(packages, username),
        'arches': sorted(arches),
        'repos': sorted(repos),
-        'column_names': names,
-        'column_attrs': attrs,
+        'column_names': report.names,
+        'column_attrs': report.attrs,
    }
    return render(request, 'devel/packages.html', context)
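
The commit message notes that the developer index page does not yet list these reports dynamically. As a purely hypothetical sketch of that follow-up, built only on available_reports() from this commit: the view name, template path and context key below are assumptions for illustration, not code from archweb.

# Hypothetical follow-up sketch (not part of this commit): render the report
# links on the developer index from the registry instead of hard-coding them.
from django.shortcuts import render

from devel.reports import available_reports


def index(request):
    context = {
        # each DeveloperReport exposes .slug, .name and .description,
        # which is everything a template needs to render a link list
        'reports': available_reports(),
    }
    return render(request, 'devel/index.html', context)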