git.openstreetmap.org Git - osqa.git/commitdiff
fixes OSQA-390 by adding a default sitemap.xml to the robots file. This might not...
author    matt <matt@0cfe37f9-358a-4d5e-be75-b63607b5c754>
          Fri, 16 Jul 2010 20:17:40 +0000 (20:17 +0000)
committer matt <matt@0cfe37f9-358a-4d5e-be75-b63607b5c754>
          Fri, 16 Jul 2010 20:17:40 +0000 (20:17 +0000)
git-svn-id: http://svn.osqa.net/svnroot/osqa/trunk@543 0cfe37f9-358a-4d5e-be75-b63607b5c754

forum_modules/robotstxt/settings.py

index c2620a26c65d31468121a0fb344ecd2c6d1685c2..44d576aea5f29029884970cd4a91ed18854878b7 100644 (file)
@@ -1,10 +1,13 @@
 from forum.settings.base import Setting, SettingSet
 from django.forms.widgets import Textarea
+from django.core.urlresolvers import reverse
+from forum.settings import APP_URL
 
 ROBOTS_SET = SettingSet('robots', 'Robots txt', "Set up the robots.txt file.", 3000)
 
 ROBOTS_FILE = Setting('ROBOTS_FILE',
-"""
+"""Sitemap: %s/sitemap.xml
+
 User-Agent: *
 Disallow: /accounts/
 Disallow: /users/
@@ -12,7 +15,7 @@ Disallow: /revisions/
 Disallow: /search
 Disallow: /matching_tags
 Disallow: /*sort=
-""", ROBOTS_SET, dict(
+""" % APP_URL, ROBOTS_SET, dict(
 label = "Robots.txt file",
 help_text = """
 The robots.txt file search engine spiders will see.
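
For context, a minimal sketch (not part of the commit) of how the changed default resolves once the settings module is loaded. The APP_URL value below is a made-up example; forum.settings.APP_URL is assumed to hold the site's base URL without a trailing slash.

# Sketch only: mirrors the new template string and the "% APP_URL" substitution.
APP_URL = "http://example.com"  # hypothetical value; the real one comes from forum.settings

ROBOTS_TEMPLATE = """Sitemap: %s/sitemap.xml

User-Agent: *
Disallow: /accounts/
Disallow: /users/
Disallow: /revisions/
Disallow: /search
Disallow: /matching_tags
Disallow: /*sort=
"""

print(ROBOTS_TEMPLATE % APP_URL)
# First line of output: Sitemap: http://example.com/sitemap.xml

Because the "%" substitution runs when the settings module is imported, the default robots.txt picks up whatever APP_URL is at load time; later changes to APP_URL only appear in the default after the module is re-evaluated.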