aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorSeb Bacon <seb.bacon@gmail.com>2012-01-11 13:39:51 +0000
committerSeb Bacon <seb.bacon@gmail.com>2012-01-11 13:39:51 +0000
commit8f2fa1ee943d5c85b67e5817b3eb2dfd31e87821 (patch)
tree96ee5e05f82690e0e1b365213b6caea586e268d8
parentaf462d8b97021a422e684262c009fa173530d279 (diff)
Discourage spiders from crawling old, wrongly generated URLs as per issue #311.
-rw-r--r--public/robots.txt6
1 files changed, 6 insertions, 0 deletions
diff --git a/public/robots.txt b/public/robots.txt
index 029ae0dbf..6a8628c93 100644
--- a/public/robots.txt
+++ b/public/robots.txt
@@ -24,3 +24,9 @@ Disallow: /feed/
Disallow: /profile/
Disallow: /signin
Disallow: /body/*/view_email$
+
+# The following was added in Jan 2012 to stop robots crawling pages
+# generated in error (see
+# https://github.com/sebbacon/alaveteli/issues/311). Can be removed
+# later in 2012 when the error pages have been dropped from the index
+Disallow: *.json.j*