From b1d0e88ad841567357f7865a88a48771c0d4dd89 Mon Sep 17 00:00:00 2001
From: Jannis Leidel
Date: Wed, 7 Dec 2011 14:20:49 +0100
Subject: [PATCH] Minor cleanups and nitpicks.

---
 LICENSE.txt        |  2 +-
 robots/admin.py    | 12 +++++++++---
 robots/forms.py    |  7 +++++--
 robots/settings.py |  8 ++++----
 4 files changed, 19 insertions(+), 10 deletions(-)

diff --git a/LICENSE.txt b/LICENSE.txt
index 14104f8..0e9acad 100644
--- a/LICENSE.txt
+++ b/LICENSE.txt
@@ -1,4 +1,4 @@
-Copyright (c) 2008-2010, Jannis Leidel
+Copyright (c) 2008-2011, Jannis Leidel
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
diff --git a/robots/admin.py b/robots/admin.py
index 461dd1c..008a02c 100644
--- a/robots/admin.py
+++ b/robots/admin.py
@@ -4,16 +4,22 @@
 from robots.models import Url, Rule
 from robots.forms import RuleAdminForm
 
+
 class RuleAdmin(admin.ModelAdmin):
     form = RuleAdminForm
     fieldsets = (
         (None, {'fields': ('robot', 'sites')}),
-        (_('URL patterns'), {'fields': ('allowed', 'disallowed')}),
-        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('crawl_delay',)}),
+        (_('URL patterns'), {
+            'fields': ('allowed', 'disallowed'),
+        }),
+        (_('Advanced options'), {
+            'classes': ('collapse',),
+            'fields': ('crawl_delay',),
+        }),
     )
     list_filter = ('sites',)
     list_display = ('robot', 'allowed_urls', 'disallowed_urls')
-    search_fields = ('robot','urls')
+    search_fields = ('robot', 'urls')
 
 admin.site.register(Url)
 admin.site.register(Rule, RuleAdmin)
diff --git a/robots/forms.py b/robots/forms.py
index 691798a..d051805 100644
--- a/robots/forms.py
+++ b/robots/forms.py
@@ -3,11 +3,14 @@
 
 from robots.models import Rule
 
+
 class RuleAdminForm(forms.ModelForm):
     class Meta:
         model = Rule
 
     def clean(self):
-        if not self.cleaned_data.get("disallowed", False) and not self.cleaned_data.get("allowed", False):
-            raise forms.ValidationError(_('Please specify at least one allowed or dissallowed URL.'))
+        if (not self.cleaned_data.get("disallowed", False) and
+                not self.cleaned_data.get("allowed", False)):
+            raise forms.ValidationError(
+                _('Please specify at least one allowed or dissallowed URL.'))
         return self.cleaned_data
diff --git a/robots/settings.py b/robots/settings.py
index ea8e308..e64ba62 100644
--- a/robots/settings.py
+++ b/robots/settings.py
@@ -3,14 +3,14 @@
 
 #: A list of one or more sitemaps to inform robots about:
 SITEMAP_URLS = []
-SITEMAP_URLS.extend(getattr(settings,'ROBOTS_SITEMAP_URLS', []))
+SITEMAP_URLS.extend(getattr(settings, 'ROBOTS_SITEMAP_URLS', []))
 
 # For backwards-compatibility, we'll automatically add a single URL
 # to the list:
-SITEMAP_URL = getattr(settings,'ROBOTS_SITEMAP_URL', None)
+SITEMAP_URL = getattr(settings, 'ROBOTS_SITEMAP_URL', None)
 if SITEMAP_URL is not None:
-    warn("The ``SITEMAP_URL`` setting is deprecated. Use ``SITEMAP_URLS`` instead.",
-         PendingDeprecationWarning)
+    warn("The ``SITEMAP_URL`` setting is deprecated. "
+         "Use ``SITEMAP_URLS`` instead.", PendingDeprecationWarning)
     SITEMAP_URLS.append(SITEMAP_URL)
 
 USE_SITEMAP = getattr(settings, 'ROBOTS_USE_SITEMAP', True)