Commit

Merge pull request #6 from pbs/develop
Removed the 404 status code and added Disallow: / if the rules are not ...
jezdez committed Nov 21, 2012
2 parents d6ade40 + 6dffa27 commit 3c6c18c
Showing 3 changed files with 2 additions and 5 deletions.
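
Net effect of the three hunks below: for a site with no Rule objects and no sitemap URLs, the view previously answered HTTP 404 with a permissive catch-all (an empty Disallow: allows everything); it now answers with its default status (presumably 200) and a restrictive catch-all. A sketch of the robots.txt body a crawler receives in that empty case, inferred from the template's {% else %} branch:

User-agent: *
Disallow: /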
robots/__init__.py: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
-VERSION = (0, 8, 1)
+VERSION = (0, 8, 2)
 __version__ = '.'.join(map(str, VERSION))
robots/templates/robots/rule_list.html: 1 addition & 1 deletion
@@ -4,7 +4,7 @@
 {% endfor %}{% for url in rule.disallowed.all %}Disallow: {{ url.pattern }}
 {% endfor %}{% if rule.crawl_delay %}Crawl-delay: {{ rule.crawl_delay }}
 {% endif %}{% endfor %}{% else %}User-agent: *
-Disallow:
+Disallow: /
 {% endif %}

 {% for sitemap_url in sitemap_urls %}Sitemap: {{ sitemap_url }}
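
When at least one rule exists, the {% else %} fallback never renders; each Rule produces its own block from the loops above. As a hypothetical illustration, a rule for Googlebot with one disallowed pattern /admin/ and a crawl delay of 10 would render roughly as follows (the User-agent line comes from template lines above this hunk that the diff does not show, so its exact form is assumed):

User-agent: Googlebot
Disallow: /admin/
Crawl-delay: 10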
robots/views.py: 3 deletions
@@ -37,9 +37,6 @@ def rules_list(request, template_name='robots/rule_list.html',

     rules = Rule.objects.filter(sites=current_site)

-    if not rules.count() and not sitemap_urls:
-        status_code = 404
-
     t = loader.get_template(template_name)
     c = RequestContext(request, {
         'rules': rules,
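
For context, here is a hedged sketch of rules_list after this hunk is applied: only the signature fragment and the body lines visible in the diff come from the commit; the imports, the current_site lookup, the sitemap_urls value, the default keyword arguments, and the final response line are assumptions about code the diff does not show.

from django.contrib.sites.models import Site
from django.http import HttpResponse
from django.template import RequestContext, loader

from robots.models import Rule


def rules_list(request, template_name='robots/rule_list.html',
               mimetype='text/plain', status_code=200):  # defaults assumed
    current_site = Site.objects.get_current()  # assumed site lookup
    sitemap_urls = []  # assumed; built elsewhere in the unshown code

    rules = Rule.objects.filter(sites=current_site)

    # The removed branch set status_code = 404 here when neither rules
    # nor sitemap_urls existed; the view now always falls through and
    # serves the template, whose {% else %} block emits "Disallow: /".
    t = loader.get_template(template_name)
    c = RequestContext(request, {
        'rules': rules,
        'sitemap_urls': sitemap_urls,
    })
    return HttpResponse(t.render(c), status=status_code,
                        mimetype=mimetype)  # mimetype kwarg is Django<=1.6 era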
