summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorLuke Shumaker <lukeshu@parabola.nu>2019-04-04 23:41:40 -0400
committerLuke Shumaker <lukeshu@parabola.nu>2019-04-04 23:41:40 -0400
commitf013c8d8cb06f7c993bbc41519de5ab7dd2ce0c4 (patch)
treeaa1c56be991f8584b47f62d695cb88941fdce638
parentb5ed13a7eccddf71fb48d3fe8679f70d786d3fe5 (diff)
parent7f91c4effbd96b51de2b63323a5ea63b19573f48 (diff)
Merge branch 'archweb-generic' into lukeshu/masterHEADparabolaweb-2019-04-07masterlukeshu/master
-rw-r--r--.gitignore3
-rw-r--r--.travis.yml7
-rw-r--r--README.md17
-rw-r--r--devel/reports.py156
-rw-r--r--devel/views.py143
-rw-r--r--mirrors/tests.py62
-rw-r--r--packages/migrations/0003_auto_20170524_0704.py19
-rw-r--r--releng/migrations/0004_auto_20170524_0704.py29
-rw-r--r--releng/models.py4
-rw-r--r--requirements_prod.txt11
-rw-r--r--settings.py49
-rw-r--r--templates/mirrors/status_table.html4
-rw-r--r--templates/releng/thanks.html2
13 files changed, 317 insertions, 189 deletions
diff --git a/.gitignore b/.gitignore
index 1fe06092..0d673a61 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,6 @@ tags
collected_static/
testing/
env/
+
+# rope
+.ropeproject/
diff --git a/.travis.yml b/.travis.yml
index 81cef987..4757641f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,8 +1,11 @@
language: python
python:
- "2.7"
-install: "pip install -r requirements.txt"
+install: "pip install -r requirements.txt && pip install coveralls"
script:
- python manage.py collectstatic --noinput
- - python manage.py test
+ - coverage run --omit='env*' --source='.' manage.py test
+
+after_success:
+ - coveralls
diff --git a/README.md b/README.md
index 377821e0..62577b40 100644
--- a/README.md
+++ b/README.md
@@ -20,6 +20,9 @@ but I have given up on maintaining it for parabolaweb.
# Archweb README
+[![Build Status](https://travis-ci.org/archlinux/archweb.svg?branch=master)](https://travis-ci.org/archlinux/archweb)
+[![Coverage Status](https://coveralls.io/repos/github/archlinux/archweb/badge.svg?branch=master)](https://coveralls.io/github/archlinux/archweb?branch=master)
+
To get a pretty version of this document, run
$ markdown README > README.html
@@ -105,6 +108,20 @@ required. A simple debugging SMTP server can be setup using Python.
In local_settings.py change the EMAIL_HOST to 'localhost' and the EMAIL_PORT to
1025.
+# Running tests and coverage
+
+To run the unittests execute the following commands:
+
+ ./manage.py collectstatic --noinput
+ ./manage.py test
+
+Running coverage:
+
+ pip install coverage
+ coverage run --omit='env*' --source='.' manage.py test
+ coverage report
+
+
# Production Installation
Ask someone who knows, or you are going to be in trouble.
diff --git a/devel/reports.py b/devel/reports.py
index b754b264..3d8b0fbd 100644
--- a/devel/reports.py
+++ b/devel/reports.py
@@ -1,17 +1,24 @@
from datetime import timedelta
-import pytz
+import pytz
from django.db.models import F
from django.template.defaultfilters import filesizeformat
from django.utils.timezone import now
+from main.models import PackageFile
+from packages.models import Depend, PackageRelation
from .models import DeveloperKey
-from main.models import PackageFile
-from packages.models import PackageRelation, Depend
+
class DeveloperReport(object):
- def __init__(self, slug, name, desc, packages_func,
- names=None, attrs=None, personal=True):
+ def __init__(self,
+ slug,
+ name,
+ desc,
+ packages_func,
+ names=None,
+ attrs=None,
+ personal=True):
self.slug = slug
self.name = name
self.description = desc
@@ -23,41 +30,38 @@ class DeveloperReport(object):
def old(packages):
cutoff = now() - timedelta(days=365 * 2)
- return packages.filter(
- build_date__lt=cutoff).order_by('build_date')
+ return packages.filter(build_date__lt=cutoff).order_by('build_date')
def outofdate(packages):
cutoff = now() - timedelta(days=30)
- return packages.filter(
- flag_date__lt=cutoff).order_by('flag_date')
+ return packages.filter(flag_date__lt=cutoff).order_by('flag_date')
def big(packages):
cutoff = 50 * 1024 * 1024
packages = packages.filter(
- compressed_size__gte=cutoff).order_by('-compressed_size')
+ compressed_size__gte=cutoff).order_by('-compressed_size')
# Format the compressed and installed sizes with MB/GB/etc suffixes
for package in packages:
package.compressed_size_pretty = filesizeformat(
package.compressed_size)
- package.installed_size_pretty = filesizeformat(
- package.installed_size)
+ package.installed_size_pretty = filesizeformat(package.installed_size)
return packages
def badcompression(packages):
cutoff = 0.90 * F('installed_size')
- packages = packages.filter(compressed_size__gt=25*1024,
- installed_size__gt=25*1024,
- compressed_size__gte=cutoff).order_by('-compressed_size')
+ packages = packages.filter(
+ compressed_size__gt=25 * 1024,
+ installed_size__gt=25 * 1024,
+ compressed_size__gte=cutoff).order_by('-compressed_size')
# Format the compressed and installed sizes with MB/GB/etc suffixes
for package in packages:
package.compressed_size_pretty = filesizeformat(
package.compressed_size)
- package.installed_size_pretty = filesizeformat(
- package.installed_size)
+ package.installed_size_pretty = filesizeformat(package.installed_size)
ratio = package.compressed_size / float(package.installed_size)
package.ratio = '%.3f' % ratio
package.compress_type = package.filename.split('.')[-1]
@@ -67,18 +71,17 @@ def badcompression(packages):
def uncompressed_man(packages, username):
# checking for all '.0'...'.9' + '.n' extensions
- bad_files = PackageFile.objects.filter(is_directory=False,
- directory__contains='/man/',
- filename__regex=r'\.[0-9n]').exclude(
- filename__endswith='.gz').exclude(
+ bad_files = PackageFile.objects.filter(
+ is_directory=False,
+ directory__contains='/man/',
+ filename__regex=r'\.[0-9n]').exclude(filename__endswith='.gz').exclude(
filename__endswith='.xz').exclude(
- filename__endswith='.bz2').exclude(
- filename__endswith='.html')
+ filename__endswith='.bz2').exclude(filename__endswith='.html')
if username:
pkg_ids = set(packages.values_list('id', flat=True))
bad_files = bad_files.filter(pkg__in=pkg_ids)
- bad_files = bad_files.values_list(
- 'pkg_id', flat=True).order_by().distinct()
+ bad_files = bad_files.values_list('pkg_id',
+ flat=True).order_by().distinct()
return packages.filter(id__in=set(bad_files))
@@ -86,12 +89,13 @@ def uncompressed_info(packages, username):
# we don't worry about looking for '*.info-1', etc., given that an
# uncompressed root page probably exists in the package anyway
bad_files = PackageFile.objects.filter(is_directory=False,
- directory__endswith='/info/', filename__endswith='.info')
+ directory__endswith='/info/',
+ filename__endswith='.info')
if username:
pkg_ids = set(packages.values_list('id', flat=True))
bad_files = bad_files.filter(pkg__in=pkg_ids)
- bad_files = bad_files.values_list(
- 'pkg_id', flat=True).order_by().distinct()
+ bad_files = bad_files.values_list('pkg_id',
+ flat=True).order_by().distinct()
return packages.filter(id__in=set(bad_files))
@@ -99,16 +103,15 @@ def unneeded_orphans(packages):
owned = PackageRelation.objects.all().values('pkgbase')
required = Depend.objects.all().values('name')
# The two separate calls to exclude is required to do the right thing
- return packages.exclude(pkgbase__in=owned).exclude(
- pkgname__in=required)
+ return packages.exclude(pkgbase__in=owned).exclude(pkgname__in=required)
def mismatched_signature(packages):
filtered = []
packages = packages.select_related(
- 'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
- known_keys = DeveloperKey.objects.select_related(
- 'owner').filter(owner__isnull=False)
+ 'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
+ known_keys = DeveloperKey.objects.select_related('owner').filter(
+ owner__isnull=False)
known_keys = {dk.key: dk for dk in known_keys}
for package in packages:
bad = False
@@ -131,7 +134,7 @@ def signature_time(packages):
cutoff = timedelta(hours=24)
filtered = []
packages = packages.select_related(
- 'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
+ 'arch', 'repo', 'packager').filter(signature_bytes__isnull=False)
for package in packages:
sig = package.signature
sig_date = sig.creation_time.replace(tzinfo=pytz.utc)
@@ -142,57 +145,60 @@ def signature_time(packages):
return filtered
-REPORT_OLD = DeveloperReport('old', 'Old',
- 'Packages last built more than two years ago', old)
+REPORT_OLD = DeveloperReport(
+ 'old', 'Old', 'Packages last built more than two years ago', old)
-REPORT_OUTOFDATE = DeveloperReport('long-out-of-date', 'Long Out-of-date',
- 'Packages marked out-of-date more than 30 days ago', outofdate)
+REPORT_OUTOFDATE = DeveloperReport(
+ 'long-out-of-date', 'Long Out-of-date',
+ 'Packages marked out-of-date more than 30 days ago', outofdate)
-REPORT_BIG = DeveloperReport('big', 'Big',
- 'Packages with compressed size > 50 MiB', big,
- ['Compressed Size', 'Installed Size'],
- ['compressed_size_pretty', 'installed_size_pretty'])
+REPORT_BIG = DeveloperReport(
+ 'big', 'Big', 'Packages with compressed size > 50 MiB', big,
+ ['Compressed Size', 'Installed Size'],
+ ['compressed_size_pretty', 'installed_size_pretty'])
-REPORT_BADCOMPRESS = DeveloperReport('badcompression', 'Bad Compression',
- 'Packages > 25 KiB with a compression ratio < 10%', badcompression,
- ['Compressed Size', 'Installed Size', 'Ratio', 'Type'],
- ['compressed_size_pretty', 'installed_size_pretty','ratio', 'compress_type'])
+REPORT_BADCOMPRESS = DeveloperReport(
+ 'badcompression', 'Bad Compression',
+ 'Packages > 25 KiB with a compression ratio < 10%', badcompression,
+ ['Compressed Size', 'Installed Size', 'Ratio', 'Type'],
+ ['compressed_size_pretty', 'installed_size_pretty', 'ratio',
+ 'compress_type'])
REPORT_MAN = DeveloperReport('uncompressed-man', 'Uncompressed Manpages',
- 'Packages with uncompressed manpages', uncompressed_man)
+ 'Packages with uncompressed manpages',
+ uncompressed_man)
REPORT_INFO = DeveloperReport('uncompressed-info', 'Uncompressed Info Pages',
- 'Packages with uncompressed info pages', uncompressed_info)
+ 'Packages with uncompressed info pages',
+ uncompressed_info)
-REPORT_ORPHANS = DeveloperReport('unneeded-orphans', 'Unneeded Orphans',
- 'Packages that have no maintainer and are not required by any '
- + 'other package in any repository', unneeded_orphans,
- personal=False)
+REPORT_ORPHANS = DeveloperReport(
+ 'unneeded-orphans',
+ 'Unneeded Orphans',
+ 'Packages that have no maintainer and are not required by any ' +
+ 'other package in any repository',
+ unneeded_orphans,
+ personal=False)
-REPORT_SIGNATURE = DeveloperReport('mismatched-signature',
- 'Mismatched Signatures',
- 'Packages where the signing key is unknown or signer != packager',
- mismatched_signature,
- ['Signed By', 'Packager'],
- ['sig_by', 'packager'])
+REPORT_SIGNATURE = DeveloperReport(
+ 'mismatched-signature', 'Mismatched Signatures',
+ 'Packages where the signing key is unknown or signer != packager',
+ mismatched_signature, ['Signed By', 'Packager'], ['sig_by', 'packager'])
-REPORT_SIG_TIME = DeveloperReport('signature-time', 'Signature Time',
- 'Packages where the signature timestamp is more than 24 hours '
- + 'after the build timestamp',
- signature_time,
- ['Signature Date', 'Packager'],
- ['sig_date', 'packager'])
+REPORT_SIG_TIME = DeveloperReport(
+ 'signature-time', 'Signature Time',
+ 'Packages where the signature timestamp is more than 24 hours ' +
+ 'after the build timestamp', signature_time,
+ ['Signature Date', 'Packager'], ['sig_date', 'packager'])
def available_reports():
- return (
- REPORT_OLD,
- REPORT_OUTOFDATE,
- REPORT_BIG,
- REPORT_BADCOMPRESS,
- REPORT_MAN,
- REPORT_INFO,
- REPORT_ORPHANS,
- REPORT_SIGNATURE,
- REPORT_SIG_TIME,
- )
+ return (REPORT_OLD,
+ REPORT_OUTOFDATE,
+ REPORT_BIG,
+ REPORT_BADCOMPRESS,
+ REPORT_MAN,
+ REPORT_INFO,
+ REPORT_ORPHANS,
+ REPORT_SIGNATURE,
+ REPORT_SIG_TIME, )
diff --git a/devel/views.py b/devel/views.py
index 66f6a965..e86c60c1 100644
--- a/devel/views.py
+++ b/devel/views.py
@@ -1,33 +1,32 @@
-from datetime import timedelta
import operator
import time
+from datetime import timedelta
-from django.http import HttpResponseRedirect
-from django.contrib.auth.decorators import \
- login_required, permission_required, user_passes_test
from django.contrib import admin
-from django.contrib.admin.models import LogEntry, ADDITION
+from django.contrib.admin.models import ADDITION, LogEntry
+from django.contrib.auth.decorators import (login_required,
+ permission_required,
+ user_passes_test)
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
from django.db.models import Count, Max
-from django.http import Http404
+from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
-from django.views.decorators.cache import never_cache
from django.utils.encoding import force_unicode
from django.utils.http import http_date
from django.utils.timezone import now
-
-from .forms import ProfileForm, UserProfileForm, NewUserForm
-from .models import UserProfile
-from .reports import available_reports
-from main.models import Package
-from main.models import Arch, Repo
+from django.views.decorators.cache import never_cache
+from main.models import Arch, Package, Repo
from news.models import News
-from packages.models import PackageRelation, Signoff, FlagRequest
+from packages.models import FlagRequest, PackageRelation, Signoff
from packages.utils import get_signoff_groups
from todolists.models import TodolistPackage
from todolists.utils import get_annotated_todolists
+
+from .forms import NewUserForm, ProfileForm, UserProfileForm
+from .models import UserProfile
+from .reports import available_reports
from .utils import get_annotated_maintainers
@@ -41,25 +40,26 @@ def index(request):
inner_q = inner_q.values('pkgbase')
flagged = Package.objects.normal().filter(
- flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname')
+ flag_date__isnull=False, pkgbase__in=inner_q).order_by('pkgname')
todopkgs = TodolistPackage.objects.select_related(
- 'todolist', 'pkg', 'arch', 'repo').exclude(
+ 'todolist', 'pkg', 'arch', 'repo').exclude(
status=TodolistPackage.COMPLETE).filter(removed__isnull=True)
- todopkgs = todopkgs.filter(pkgbase__in=inner_q).order_by(
- 'todolist__name', 'pkgname')
+ todopkgs = todopkgs.filter(pkgbase__in=inner_q).order_by('todolist__name',
+ 'pkgname')
todolists = get_annotated_todolists(incomplete_only=True)
- signoffs = sorted(get_signoff_groups(user=request.user),
- key=operator.attrgetter('pkgbase'))
+ signoffs = sorted(
+ get_signoff_groups(user=request.user),
+ key=operator.attrgetter('pkgbase'))
page_dict = {
- 'todos': todolists,
- 'flagged': flagged,
- 'todopkgs': todopkgs,
- 'signoffs': signoffs,
- 'reports': available_reports(),
+ 'todos': todolists,
+ 'flagged': flagged,
+ 'todopkgs': todopkgs,
+ 'signoffs': signoffs,
+ 'reports': available_reports(),
}
return render(request, 'devel/index.html', page_dict)
@@ -69,35 +69,37 @@ def index(request):
def stats(request):
"""The second half of the dev dashboard."""
arches = Arch.objects.all().annotate(
- total_ct=Count('packages'), flagged_ct=Count('packages__flag_date'))
+ total_ct=Count('packages'),
+ flagged_ct=Count('packages__flag_date'))
repos = Repo.objects.all().annotate(
- total_ct=Count('packages'), flagged_ct=Count('packages__flag_date'))
+ total_ct=Count('packages'),
+ flagged_ct=Count('packages__flag_date'))
# the join is huge unless we do this separately, so merge the result here
repo_maintainers = dict(Repo.objects.order_by().filter(
- userprofile__user__is_active=True).values_list('id').annotate(
- Count('userprofile')))
+ userprofile__user__is_active=True).values_list('id').annotate(Count(
+ 'userprofile')))
for repo in repos:
repo.maintainer_ct = repo_maintainers.get(repo.id, 0)
maintainers = get_annotated_maintainers()
maintained = PackageRelation.objects.filter(
- type=PackageRelation.MAINTAINER).values('pkgbase')
+ type=PackageRelation.MAINTAINER).values('pkgbase')
total_orphans = Package.objects.exclude(pkgbase__in=maintained).count()
total_flagged_orphans = Package.objects.filter(
- flag_date__isnull=False).exclude(pkgbase__in=maintained).count()
+ flag_date__isnull=False).exclude(pkgbase__in=maintained).count()
total_updated = Package.objects.filter(packager__isnull=True).count()
orphan = {
- 'package_count': total_orphans,
- 'flagged_count': total_flagged_orphans,
- 'updated_count': total_updated,
+ 'package_count': total_orphans,
+ 'flagged_count': total_flagged_orphans,
+ 'updated_count': total_updated,
}
page_dict = {
- 'arches': arches,
- 'repos': repos,
- 'maintainers': maintainers,
- 'orphan': orphan,
+ 'arches': arches,
+ 'repos': repos,
+ 'maintainers': maintainers,
+ 'orphan': orphan,
}
return render(request, 'devel/stats.html', page_dict)
@@ -106,25 +108,23 @@ def stats(request):
@login_required
def clock(request):
devs = User.objects.filter(is_active=True).order_by(
- 'first_name', 'last_name').select_related('userprofile')
+ 'first_name', 'last_name').select_related('userprofile')
- latest_news = dict(News.objects.filter(
- author__is_active=True).values_list('author').order_by(
- ).annotate(last_post=Max('postdate')))
+ latest_news = dict(News.objects.filter(author__is_active=True).values_list(
+ 'author').order_by().annotate(last_post=Max('postdate')))
latest_package = dict(Package.objects.filter(
- packager__is_active=True).values_list('packager').order_by(
- ).annotate(last_build=Max('build_date')))
+ packager__is_active=True).values_list('packager').order_by().annotate(
+ last_build=Max('build_date')))
latest_signoff = dict(Signoff.objects.filter(
- user__is_active=True).values_list('user').order_by(
- ).annotate(last_signoff=Max('created')))
+ user__is_active=True).values_list('user').order_by().annotate(
+ last_signoff=Max('created')))
# The extra() bit ensures we can use our 'user_id IS NOT NULL' index
latest_flagreq = dict(FlagRequest.objects.filter(
- user__is_active=True).extra(
- where=['user_id IS NOT NULL']).values_list('user_id').order_by(
- ).annotate(last_flagrequest=Max('created')))
+ user__is_active=True).extra(where=['user_id IS NOT NULL']).values_list(
+ 'user_id').order_by().annotate(last_flagrequest=Max('created')))
latest_log = dict(LogEntry.objects.filter(
- user__is_active=True).values_list('user').order_by(
- ).annotate(last_log=Max('action_time')))
+ user__is_active=True).values_list('user').order_by().annotate(
+ last_log=Max('action_time')))
for dev in devs:
dates = [
@@ -142,10 +142,7 @@ def clock(request):
dev.last_action = None
current_time = now()
- page_dict = {
- 'developers': devs,
- 'utc_now': current_time,
- }
+ page_dict = {'developers': devs, 'utc_now': current_time, }
response = render(request, 'devel/clock.html', page_dict)
if not response.has_header('Expires'):
@@ -162,8 +159,9 @@ def change_profile(request):
profile, _ = UserProfile.objects.get_or_create(user=request.user)
if request.POST:
form = ProfileForm(request.POST)
- profile_form = UserProfileForm(request.POST, request.FILES,
- instance=profile)
+ profile_form = UserProfileForm(request.POST,
+ request.FILES,
+ instance=profile)
if form.is_valid() and profile_form.is_valid():
request.user.email = form.cleaned_data['email']
if form.cleaned_data['passwd1']:
@@ -176,7 +174,8 @@ def change_profile(request):
form = ProfileForm(initial={'email': request.user.email})
profile_form = UserProfileForm(instance=profile)
return render(request, 'devel/profile.html',
- {'form': form, 'profile_form': profile_form})
+ {'form': form,
+ 'profile_form': profile_form})
@login_required
@@ -190,14 +189,18 @@ def report(request, report_name, username=None):
user = None
if username:
user = get_object_or_404(User, username=username, is_active=True)
- maintained = PackageRelation.objects.filter(user=user,
- type=PackageRelation.MAINTAINER).values('pkgbase')
+ maintained = PackageRelation.objects.filter(
+ user=user, type=PackageRelation.MAINTAINER).values('pkgbase')
packages = packages.filter(pkgbase__in=maintained)
maints = User.objects.filter(id__in=PackageRelation.objects.filter(
type=PackageRelation.MAINTAINER).values('user'))
- packages = report.packages(packages, username)
+ if report.slug == 'uncompressed-man' or report.slug == 'uncompressed-info':
+ packages = report.packages(packages, username)
+ else:
+ packages = report.packages(packages)
+
arches = {pkg.arch for pkg in packages}
repos = {pkg.repo for pkg in packages}
context = {
@@ -217,13 +220,12 @@ def report(request, report_name, username=None):
def log_addition(request, obj):
"""Cribbed from ModelAdmin.log_addition."""
LogEntry.objects.log_action(
- user_id = request.user.pk,
- content_type_id = ContentType.objects.get_for_model(obj).pk,
- object_id = obj.pk,
- object_repr = force_unicode(obj),
- action_flag = ADDITION,
- change_message = "Added via Create New User form."
- )
+ user_id=request.user.pk,
+ content_type_id=ContentType.objects.get_for_model(obj).pk,
+ object_id=obj.pk,
+ object_repr=force_unicode(obj),
+ action_flag=ADDITION,
+ change_message="Added via Create New User form.")
@permission_required('auth.add_user')
@@ -257,10 +259,7 @@ def admin_log(request, username=None):
user = None
if username:
user = get_object_or_404(User, username=username)
- context = {
- 'title': "Admin Action Log",
- 'log_user': user,
- }
+ context = {'title': "Admin Action Log", 'log_user': user, }
context.update(admin.site.each_context())
return render(request, 'devel/admin_log.html', context)
diff --git a/mirrors/tests.py b/mirrors/tests.py
new file mode 100644
index 00000000..30b2d730
--- /dev/null
+++ b/mirrors/tests.py
@@ -0,0 +1,62 @@
+import json
+
+from django.test import TestCase
+
+from models import MirrorUrl, MirrorProtocol, Mirror
+
+def create_mirror_url():
+ mirror = Mirror.objects.create(name='mirror1', admin_email='admin@archlinux.org')
+ mirror_protocol = MirrorProtocol.objects.create(protocol='http')
+ mirror_url = MirrorUrl.objects.create(url='https://archlinux.org', protocol=mirror_protocol,
+ mirror=mirror, country='US')
+ return mirror_url
+
+class MirrorUrlTest(TestCase):
+ def setUp(self):
+ self.mirror_url = create_mirror_url()
+
+ def testAddressFamilies(self):
+ self.assertEqual(self.mirror_url.address_families(), [2, 10])
+
+ def testHostname(self):
+ self.assertEqual(self.mirror_url.hostname, 'archlinux.org')
+
+ def testGetAbsoluteUrl(self):
+ absolute_url = self.mirror_url.get_absolute_url()
+ expected = '/mirrors/%s/%d/' % (self.mirror_url.mirror.name, self.mirror_url.pk)
+ self.assertEqual(absolute_url, expected)
+
+ def testClean(self):
+ # TODO: add test for self.mirror_url.clean()
+ pass
+
+ def tearDown(self):
+ self.mirror_url.delete()
+
+class MirrorStatusTest(TestCase):
+ def test_status(self):
+ response = self.client.get('/mirrors/status/')
+ self.assertEqual(response.status_code, 200)
+
+ def test_json_endpoint(self):
+ response = self.client.get('/mirrors/status/json/')
+ self.assertEqual(response.status_code, 200)
+ data = json.loads(response.content)
+ self.assertEqual(data['urls'], [])
+
+ mirror_url = create_mirror_url()
+
+ # Verify that the cache works
+ response = self.client.get('/mirrors/status/json/')
+ self.assertEqual(response.status_code, 200)
+ data = json.loads(response.content)
+
+ # Disables the cache_function's cache
+ with self.settings(CACHES={'default': {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}):
+ response = self.client.get('/mirrors/status/json/')
+ self.assertEqual(response.status_code, 200)
+ data = json.loads(response.content)
+
+ self.assertEqual(len(data['urls']), 1)
+ mirror = data['urls'][0]
+ self.assertEqual(mirror['url'], mirror_url.url)
diff --git a/packages/migrations/0003_auto_20170524_0704.py b/packages/migrations/0003_auto_20170524_0704.py
new file mode 100644
index 00000000..997f329e
--- /dev/null
+++ b/packages/migrations/0003_auto_20170524_0704.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('packages', '0002_auto_20160731_0556'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='flagrequest',
+ name='user_email',
+ field=models.EmailField(max_length=254, verbose_name=b'email address'),
+ ),
+ ]
diff --git a/releng/migrations/0004_auto_20170524_0704.py b/releng/migrations/0004_auto_20170524_0704.py
new file mode 100644
index 00000000..fe4e6a66
--- /dev/null
+++ b/releng/migrations/0004_auto_20170524_0704.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+from __future__ import unicode_literals
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('releng', '0003_release_populate_last_modified'),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name='test',
+ name='modules',
+ field=models.ManyToManyField(to='releng.Module', blank=True),
+ ),
+ migrations.AlterField(
+ model_name='test',
+ name='rollback_modules',
+ field=models.ManyToManyField(related_name='rollback_test_set', to='releng.Module', blank=True),
+ ),
+ migrations.AlterField(
+ model_name='test',
+ name='user_email',
+ field=models.EmailField(max_length=254, verbose_name=b'email address'),
+ ),
+ ]
diff --git a/releng/models.py b/releng/models.py
index 9b7f8c90..3f54f956 100644
--- a/releng/models.py
+++ b/releng/models.py
@@ -101,12 +101,12 @@ class Test(models.Model):
source = models.ForeignKey(Source)
clock_choice = models.ForeignKey(ClockChoice)
filesystem = models.ForeignKey(Filesystem)
- modules = models.ManyToManyField(Module, null=True, blank=True)
+ modules = models.ManyToManyField(Module, blank=True)
bootloader = models.ForeignKey(Bootloader)
rollback_filesystem = models.ForeignKey(Filesystem,
related_name="rollback_test_set", null=True, blank=True)
rollback_modules = models.ManyToManyField(Module,
- related_name="rollback_test_set", null=True, blank=True)
+ related_name="rollback_test_set", blank=True)
success = models.BooleanField(default=True)
comments = models.TextField(null=True, blank=True)
diff --git a/requirements_prod.txt b/requirements_prod.txt
index 4b95d788..ecee1292 100644
--- a/requirements_prod.txt
+++ b/requirements_prod.txt
@@ -1,13 +1,4 @@
--e git+git://github.com/fredj/cssmin.git@master#egg=cssmin
-Django==1.8.18
-IPy==0.83
-Markdown==2.6.2
-MarkupSafe==0.23
-bencode==1.0
-django-countries==3.3
-jsmin==2.1.1
-pgpdump==1.5
+-r requirements.txt
psycopg2==2.6.1
pyinotify==0.9.6
python-memcached==1.54
-pytz>=2015.4
diff --git a/settings.py b/settings.py
index 12148766..3190d8c4 100644
--- a/settings.py
+++ b/settings.py
@@ -1,5 +1,5 @@
-import os
# Django settings for parabolaweb project.
+from os import path
## Set the debug values
DEBUG = False
@@ -15,7 +15,7 @@ MANAGERS = ADMINS
NOTIFICATIONS = ['dev@lists.parabola.nu']
# Full path to the data directory
-DEPLOY_PATH = os.path.dirname(os.path.realpath(__file__))
+DEPLOY_PATH = path.dirname(path.realpath(__file__))
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
@@ -44,27 +44,6 @@ LOGIN_REDIRECT_URL = '/'
# Set django's User stuff to use our profile model
AUTH_PROFILE_MODULE = 'devel.UserProfile'
-from os import path
-TEMPLATES = [
- {
- 'BACKEND': 'django.template.backends.django.DjangoTemplates',
- 'DIRS': [
- path.join(DEPLOY_PATH, 'templates')
- ],
- 'APP_DIRS': True,
- 'OPTIONS': {
- 'debug': DEBUG,
- 'context_processors': [
- 'django.contrib.auth.context_processors.auth',
- 'django.core.context_processors.debug',
- 'django.contrib.messages.context_processors.messages',
- 'main.context_processors.secure',
- 'main.context_processors.branding',
- ],
- }
- }
-]
-
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
@@ -82,11 +61,11 @@ ROOT_URLCONF = 'urls'
STATIC_URL = '/static/'
# Location to collect static files
-STATIC_ROOT = os.path.join(DEPLOY_PATH, 'collected_static')
+STATIC_ROOT = path.join(DEPLOY_PATH, 'collected_static')
# Look for more static files in these locations
STATICFILES_DIRS = (
- os.path.join(DEPLOY_PATH, 'sitestatic'),
+ path.join(DEPLOY_PATH, 'sitestatic'),
)
# Static files backend that allows us to use far future Expires headers
@@ -222,6 +201,26 @@ try:
except ImportError:
pass
+TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [
+ path.join(DEPLOY_PATH, 'templates')
+ ],
+ 'APP_DIRS': True,
+ 'OPTIONS': {
+ 'debug': DEBUG,
+ 'context_processors': [
+ 'django.contrib.auth.context_processors.auth',
+ 'django.core.context_processors.debug',
+ 'django.contrib.messages.context_processors.messages',
+ 'main.context_processors.secure',
+ 'main.context_processors.branding',
+ ],
+ }
+ }
+]
+
# Enable the debug toolbar if requested
if DEBUG_TOOLBAR:
MIDDLEWARE_CLASSES = \
diff --git a/templates/mirrors/status_table.html b/templates/mirrors/status_table.html
index 3f8cd2d9..1effe5a2 100644
--- a/templates/mirrors/status_table.html
+++ b/templates/mirrors/status_table.html
@@ -14,12 +14,12 @@
</tr>
</thead>
<tbody>
- {% for m_url in urls %}<tr class="{% cycle 'odd' 'even' }}">
+ {% for m_url in urls %}<tr class="{% cycle 'odd' 'even' %}">
<td>{{ m_url.url }}</td>
<td>{{ m_url.protocol }}</td>
<td class="country">{% country_flag m_url.country %}{{ m_url.country.name }}</td>
<td>{{ m_url.completion_pct|percentage:1 }}</td>
- <td>{{ m_url.delay|duration|default:unknown }}</td>
+ <td>{{ m_url.delay|duration }}</td>
<td>{{ m_url.duration_avg|floatvalue:2 }}</td>
<td>{{ m_url.duration_stddev|floatvalue:2 }}</td>
<td>{{ m_url.score|floatvalue:1|default:'∞' }}</td>
diff --git a/templates/releng/thanks.html b/templates/releng/thanks.html
index b772fad3..d85ad6a9 100644
--- a/templates/releng/thanks.html
+++ b/templates/releng/thanks.html
@@ -6,7 +6,7 @@
<div class="box">
<h2>Thanks!</h2>
<p>Thank you for taking the time to give us this information!
- Your results have been succesfully added to our database.</p>
+ Your results have been successfully added to our database.</p>
<p>You can now <a href="{% url 'releng-test-overview' %}">go back to the results</a>,
<a href="{% url 'releng-test-submit' %}">give more feedback</a>, or
have a look at the <a href="{% url 'releng-iso-overview' %}">look at