
Commit

Minor cleanups and nitpicks.
jezdez committed Dec 7, 2011
1 parent 5fd060f commit b1d0e88
Showing 4 changed files with 19 additions and 10 deletions.
2 changes: 1 addition & 1 deletion LICENSE.txt
@@ -1,4 +1,4 @@
-Copyright (c) 2008-2010, Jannis Leidel
+Copyright (c) 2008-2011, Jannis Leidel
 All rights reserved.
 
 Redistribution and use in source and binary forms, with or without
12 changes: 9 additions & 3 deletions robots/admin.py
@@ -4,16 +4,22 @@
 from robots.models import Url, Rule
 from robots.forms import RuleAdminForm
 
+
 class RuleAdmin(admin.ModelAdmin):
     form = RuleAdminForm
     fieldsets = (
         (None, {'fields': ('robot', 'sites')}),
-        (_('URL patterns'), {'fields': ('allowed', 'disallowed')}),
-        (_('Advanced options'), {'classes': ('collapse',), 'fields': ('crawl_delay',)}),
+        (_('URL patterns'), {
+            'fields': ('allowed', 'disallowed'),
+        }),
+        (_('Advanced options'), {
+            'classes': ('collapse',),
+            'fields': ('crawl_delay',),
+        }),
     )
     list_filter = ('sites',)
     list_display = ('robot', 'allowed_urls', 'disallowed_urls')
-    search_fields = ('robot','urls')
+    search_fields = ('robot', 'urls')
 
 admin.site.register(Url)
 admin.site.register(Rule, RuleAdmin)
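
For context, the 'collapse' class on the Advanced options fieldset makes the Django admin render that group collapsed by default. A quick way to sanity-check the registration from a Django shell (a sketch, assuming django-robots is installed and the admin is autodiscovered; admin.site._registry is a private but long-standing attribute):

    >>> from django.contrib import admin
    >>> from robots.models import Rule
    >>> from robots.admin import RuleAdmin
    >>> rule_admin = admin.site._registry[Rule]  # the ModelAdmin instance for Rule
    >>> isinstance(rule_admin, RuleAdmin)
    True
    >>> rule_admin.fieldsets[2][1]['classes']  # the collapsed "Advanced options" group
    ('collapse',)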
7 changes: 5 additions & 2 deletions robots/forms.py
@@ -3,11 +3,14 @@
 
 from robots.models import Rule
 
+
 class RuleAdminForm(forms.ModelForm):
     class Meta:
         model = Rule
 
     def clean(self):
-        if not self.cleaned_data.get("disallowed", False) and not self.cleaned_data.get("allowed", False):
-            raise forms.ValidationError(_('Please specify at least one allowed or dissallowed URL.'))
+        if (not self.cleaned_data.get("disallowed", False) and
+                not self.cleaned_data.get("allowed", False)):
+            raise forms.ValidationError(
+                _('Please specify at least one allowed or dissallowed URL.'))
         return self.cleaned_data
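
The wrapped condition keeps the behaviour identical: clean() rejects a rule that specifies neither an allowed nor a disallowed URL. A minimal sketch of exercising it (assumes a configured Django project with django-robots installed; the error text, typo included, is the string from the form above):

    >>> from robots.forms import RuleAdminForm
    >>> form = RuleAdminForm(data={'robot': '*'})  # no allowed/disallowed URLs given
    >>> form.is_valid()
    False
    >>> form.non_field_errors()
    [u'Please specify at least one allowed or dissallowed URL.']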
8 changes: 4 additions & 4 deletions robots/settings.py
@@ -3,14 +3,14 @@
 
 #: A list of one or more sitemaps to inform robots about:
 SITEMAP_URLS = []
-SITEMAP_URLS.extend(getattr(settings,'ROBOTS_SITEMAP_URLS', []))
+SITEMAP_URLS.extend(getattr(settings, 'ROBOTS_SITEMAP_URLS', []))
 
 # For backwards-compatibility, we'll automatically add a single URL
 # to the list:
-SITEMAP_URL = getattr(settings,'ROBOTS_SITEMAP_URL', None)
+SITEMAP_URL = getattr(settings, 'ROBOTS_SITEMAP_URL', None)
 if SITEMAP_URL is not None:
-    warn("The ``SITEMAP_URL`` setting is deprecated. Use ``SITEMAP_URLS`` instead.",
-         PendingDeprecationWarning)
+    warn("The ``SITEMAP_URL`` setting is deprecated. "
+         "Use ``SITEMAP_URLS`` instead.", PendingDeprecationWarning)
     SITEMAP_URLS.append(SITEMAP_URL)
 
 USE_SITEMAP = getattr(settings, 'ROBOTS_USE_SITEMAP', True)
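
This module reads two project settings: the ROBOTS_SITEMAP_URLS list, and the deprecated single-URL ROBOTS_SITEMAP_URL, which is still appended to the list for backwards compatibility while raising a PendingDeprecationWarning. In a project's settings.py that looks like this (a sketch; the URLs are placeholders):

    # settings.py of a project using django-robots
    ROBOTS_SITEMAP_URLS = [
        'http://example.com/sitemap.xml',
        'http://example.com/news/sitemap.xml',
    ]

    # Deprecated spelling -- still honoured, but triggers a
    # PendingDeprecationWarning and is simply appended to the list:
    # ROBOTS_SITEMAP_URL = 'http://example.com/sitemap.xml'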
