author     Mary Umoh <umohm12@gmail.com>                  2017-08-08 23:12:08 +0200
committer  Dylan William Hardison <dylan@hardison.net>    2017-08-08 23:12:08 +0200
commit     63dd3061a164345095847e502bb5395bb5570c3a (patch)
tree       3022c22ceb767f6e360d62124d558c2accf86a44
parent     c4db6d0065992417859f6b8db11e0fcad12051d5 (diff)
Bug 1386336 - Generate robots.txt at checksetup.pl from a template
-rw-r--r--   Bugzilla/Install/Filesystem.pm                                        | 18
-rw-r--r--   extensions/SiteMapIndex/Extension.pm                                  | 38
-rw-r--r--   extensions/SiteMapIndex/robots.txt                                    | 10
-rw-r--r--   extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl  |  2
-rw-r--r--   template/en/default/robots.txt.tmpl (renamed from robots.txt)         |  3
5 files changed, 21 insertions, 50 deletions
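With this change, robots.txt is no longer a static file that the SiteMapIndex extension rewrites in place; checksetup.pl now renders template/en/default/robots.txt.tmpl on each run and lets extensions contribute through hooks. A minimal preview sketch (not part of this commit) that reuses the same calls as the new robots_txt() helper in the diff below; the use lib paths are an assumption and may differ per install:

    #!/usr/bin/perl
    # Sketch only: print the robots.txt that checksetup.pl would now write,
    # using the same hook + template pass as Bugzilla::Install::Filesystem::robots_txt().
    use strict;
    use warnings;
    use lib qw(. lib);    # assumption: run from the bugzilla root; adjust for your install
    use Bugzilla;
    use Bugzilla::Hook;

    my %vars;
    my $output = '';
    # Extensions (e.g. SiteMapIndex) add template variables here.
    Bugzilla::Hook::process("before_robots_txt", { vars => \%vars });
    # robots.txt.tmpl ends with [% Hook.process("end") %], which appends any
    # extension-provided hook/robots-end.txt.tmpl.
    Bugzilla->template->process("robots.txt.tmpl", \%vars, \$output)
        or die Bugzilla->template->error;
    print $output;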
diff --git a/Bugzilla/Install/Filesystem.pm b/Bugzilla/Install/Filesystem.pm
index adc1815c1..3114d64be 100644
--- a/Bugzilla/Install/Filesystem.pm
+++ b/Bugzilla/Install/Filesystem.pm
@@ -214,11 +214,10 @@ sub FILESYSTEM {
         'Bugzilla.pm'           => { perms => CGI_READ },
         "$localconfig*"         => { perms => CGI_READ },
-        'META.*'               => { perms => CGI_READ },
-        'MYMETA.*'             => { perms => CGI_READ },
+        'META.*'                => { perms => CGI_READ },
+        'MYMETA.*'              => { perms => CGI_READ },
         'bugzilla.dtd'          => { perms => WS_SERVE },
         'mod_perl.pl'           => { perms => WS_SERVE },
-        'robots.txt'            => { perms => WS_SERVE },
         '.htaccess'             => { perms => WS_SERVE },
         'cvs-update.log'        => { perms => WS_SERVE },
         'scripts/sendunsentbugmail.pl' => { perms => WS_EXECUTE },
@@ -405,6 +404,9 @@ sub FILESYSTEM {
         "skins/yui3.css" => { perms => CGI_READ,
                               overwrite => 1,
                               contents => $yui3_all_css },
+        "robots.txt" => { perms => CGI_READ,
+                          overwrite => 1,
+                          contents => \&robots_txt},
     );

     # Because checksetup controls the creation of index.html separately
@@ -952,6 +954,16 @@ sub _check_web_server_group {
     return $group_id;
 }

+sub robots_txt {
+    my $output = '';
+    my %vars;
+    Bugzilla::Hook::process("before_robots_txt", { vars => \%vars });
+    Bugzilla->template->process("robots.txt.tmpl", \%vars, \$output)
+        or die Bugzilla->template->error;
+    return $output;
+}
+
+
 1;

 __END__
diff --git a/extensions/SiteMapIndex/Extension.pm b/extensions/SiteMapIndex/Extension.pm
index 1e2010adb..a3f093485 100644
--- a/extensions/SiteMapIndex/Extension.pm
+++ b/extensions/SiteMapIndex/Extension.pm
@@ -90,9 +90,6 @@ sub install_before_final_checks {
     }

     return if (correct_urlbase() ne 'https://bugzilla.mozilla.org/');
-
-
-    $self->_fix_robots_txt();
 }

 sub install_filesystem {
@@ -127,38 +124,9 @@ EOT
     };
 }

-sub _fix_robots_txt {
-    my ($self) = @_;
-
-    my $cgi_path = bz_locations()->{'cgi_path'};
-    my $robots_file = "$cgi_path/robots.txt";
-    my $current_fh = new IO::File("$cgi_path/robots.txt", 'r');
-    if (!$current_fh) {
-        warn "$robots_file: $!";
-        return;
-    }
-
-    my $current_contents;
-    { local $/; $current_contents = <$current_fh> }
-    $current_fh->close();
-
-    return if $current_contents =~ /^Sitemap:/m;
-    my $backup_name = "$cgi_path/robots.txt.old";
-    print get_text('sitemap_fixing_robots', { current => $robots_file,
-                                              backup  => $backup_name }), "\n";
-    rename $robots_file, $backup_name or die "backup failed: $!";
-
-    my $new_fh = new IO::File($self->package_dir . '/robots.txt', 'r');
-    $new_fh || die "Could not open new robots.txt template file: $!";
-    my $new_contents;
-    { local $/; $new_contents = <$new_fh> }
-    $new_fh->close() || die "Could not close new robots.txt template file: $!";
-
-    my $sitemap_url = correct_urlbase() . SITEMAP_URL;
-    $new_contents =~ s/SITEMAP_URL/$sitemap_url/;
-    $new_fh = new IO::File("$cgi_path/robots.txt", 'w');
-    $new_fh || die "Could not open new robots.txt file: $!";
-    print $new_fh $new_contents;
-    $new_fh->close() || die "Could not close new robots.txt file: $!";
+sub before_robots_txt {
+    my ($self, $args) = @_;
+    $args->{vars}{SITEMAP_URL} = correct_urlbase() . SITEMAP_URL;
 }

 __PACKAGE__->NAME;
diff --git a/extensions/SiteMapIndex/robots.txt b/extensions/SiteMapIndex/robots.txt
deleted file mode 100644
index 74cc63074..000000000
--- a/extensions/SiteMapIndex/robots.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-User-agent: *
-Disallow: /*.cgi
-Disallow: /show_bug.cgi*ctype=*
-Allow: /$
-Allow: /index.cgi
-Allow: /page.cgi
-Allow: /show_bug.cgi
-Allow: /describecomponents.cgi
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: SITEMAP_URL
diff --git a/extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl b/extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl
new file mode 100644
index 000000000..818afb151
--- /dev/null
+++ b/extensions/SiteMapIndex/template/en/default/hook/robots-end.txt.tmpl
@@ -0,0 +1,2 @@
+Allow: /data/SiteMapIndex/sitemap*.xml.gz
+Sitemap: [% SITEMAP_URL %]
diff --git a/robots.txt b/template/en/default/robots.txt.tmpl
index cececd632..762b000ec 100644
--- a/robots.txt
+++ b/template/en/default/robots.txt.tmpl
@@ -16,5 +16,4 @@ Disallow: /show_bug.cgi*format=multiple*
 Allow: /describecomponents.cgi
 Allow: /describekeywords.cgi
-Allow: /data/SiteMapIndex/sitemap*.xml.gz
-Sitemap: http://bugzilla.mozilla.org/page.cgi?id=sitemap/sitemap.xml
+[% Hook.process("end") %]
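For extension authors, the patch exposes two hook points, mirrored by the SiteMapIndex changes above: a before_robots_txt Perl hook for supplying template variables, and a hook/robots-end.txt.tmpl template that [% Hook.process("end") %] in robots.txt.tmpl appends. A hypothetical sketch; the extension name, variable, and sitemap URL are illustrative, not from this commit:

    package Bugzilla::Extension::ExampleRobots;    # hypothetical extension name
    use strict;
    use warnings;
    use parent qw(Bugzilla::Extension);

    # Supply template variables before checksetup.pl processes robots.txt.tmpl.
    sub before_robots_txt {
        my ($self, $args) = @_;
        $args->{vars}{EXAMPLE_SITEMAP_URL} = 'https://example.org/sitemap.xml';   # illustrative URL
    }

    __PACKAGE__->NAME;

    # The extension would also ship
    # extensions/ExampleRobots/template/en/default/hook/robots-end.txt.tmpl, e.g.:
    #
    #   Sitemap: [% EXAMPLE_SITEMAP_URL %]
    #
    # which ends up appended to the generated robots.txt.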