From 1feabf50777a6f0f4193f467faad9f996e729367 Mon Sep 17 00:00:00 2001
From: Mary Umoh
Date: Wed, 23 Aug 2017 21:03:12 -0400
Subject: Bug 1393145 - See if we can add a directive/rule to robots.txt to
 get search engines to crawl more slowly

---
 template/en/default/robots.txt.tmpl | 1 +
 1 file changed, 1 insertion(+)

diff --git a/template/en/default/robots.txt.tmpl b/template/en/default/robots.txt.tmpl
index d8c1b5b86..c4948efe5 100644
--- a/template/en/default/robots.txt.tmpl
+++ b/template/en/default/robots.txt.tmpl
@@ -1,5 +1,6 @@
 User-agent: *
 Disallow: /
+Crawl-delay: 30
 
 [% IF NOT urlbase.matches("bugzilla-dev") %]
-- 
cgit v1.2.3-24-g4f1b