author      Mary Umoh <umohm12@gmail.com>                   2017-08-08 23:12:08 +0200
committer   Dylan William Hardison <dylan@hardison.net>     2017-08-08 23:12:08 +0200
commit      63dd3061a164345095847e502bb5395bb5570c3a (patch)
tree        3022c22ceb767f6e360d62124d558c2accf86a44 /extensions/SiteMapIndex/Extension.pm
parent      c4db6d0065992417859f6b8db11e0fcad12051d5 (diff)
Bug 1386336 - Generate robots.txt at checksetup.pl from a template
Diffstat (limited to 'extensions/SiteMapIndex/Extension.pm')
-rw-r--r--   extensions/SiteMapIndex/Extension.pm   38
1 file changed, 3 insertions, 35 deletions
diff --git a/extensions/SiteMapIndex/Extension.pm b/extensions/SiteMapIndex/Extension.pm
index 1e2010adb..a3f093485 100644
--- a/extensions/SiteMapIndex/Extension.pm
+++ b/extensions/SiteMapIndex/Extension.pm
@@ -90,9 +90,6 @@ sub install_before_final_checks {
     }

     return if (correct_urlbase() ne 'https://bugzilla.mozilla.org/');
-
-
-    $self->_fix_robots_txt();
 }

 sub install_filesystem {
@@ -127,38 +124,9 @@ EOT
     };
 }

-sub _fix_robots_txt {
-    my ($self) = @_;
-    my $cgi_path = bz_locations()->{'cgi_path'};
-    my $robots_file = "$cgi_path/robots.txt";
-    my $current_fh = new IO::File("$cgi_path/robots.txt", 'r');
-    if (!$current_fh) {
-        warn "$robots_file: $!";
-        return;
-    }
-
-    my $current_contents;
-    { local $/; $current_contents = <$current_fh> }
-    $current_fh->close();
-
-    return if $current_contents =~ /^Sitemap:/m;
-    my $backup_name = "$cgi_path/robots.txt.old";
-    print get_text('sitemap_fixing_robots', { current => $robots_file,
-                                              backup  => $backup_name }), "\n";
-    rename $robots_file, $backup_name or die "backup failed: $!";
-
-    my $new_fh = new IO::File($self->package_dir . '/robots.txt', 'r');
-    $new_fh || die "Could not open new robots.txt template file: $!";
-    my $new_contents;
-    { local $/; $new_contents = <$new_fh> }
-    $new_fh->close() || die "Could not close new robots.txt template file: $!";
-
-    my $sitemap_url = correct_urlbase() . SITEMAP_URL;
-    $new_contents =~ s/SITEMAP_URL/$sitemap_url/;
-    $new_fh = new IO::File("$cgi_path/robots.txt", 'w');
-    $new_fh || die "Could not open new robots.txt file: $!";
-    print $new_fh $new_contents;
-    $new_fh->close() || die "Could not close new robots.txt file: $!";
+sub before_robots_txt {
+    my ($self, $args) = @_;
+    $args->{vars}{SITEMAP_URL} = correct_urlbase() . SITEMAP_URL;
 }

 __PACKAGE__->NAME;
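
With this change the SiteMapIndex extension no longer rewrites robots.txt itself: its before_robots_txt hook only exposes SITEMAP_URL as a template variable, and checksetup.pl renders robots.txt from a template (per the commit message). A minimal sketch of how that template-driven generation could be wired up, assuming a Template Toolkit template named robots.txt.tmpl containing a "Sitemap: [% SITEMAP_URL %]" line; the template name, include path, and output path are illustrative assumptions, only the hook name and the vars hash come from the diff above:

    #!/usr/bin/perl
    # Illustrative sketch only -- not the checksetup.pl code from this commit.
    use strict;
    use warnings;
    use Template;
    use Bugzilla::Hook;

    my %vars;
    # Extensions may add template variables; SiteMapIndex's before_robots_txt
    # hook sets $args->{vars}{SITEMAP_URL} (see the diff above).
    Bugzilla::Hook::process('before_robots_txt', { vars => \%vars });

    # Hypothetical template location and output path.
    my $template = Template->new({ INCLUDE_PATH => 'template/en/default' })
        or die Template->error();

    # Render the template, substituting SITEMAP_URL, into the live robots.txt.
    $template->process('robots.txt.tmpl', \%vars, 'robots.txt')
        or die $template->error();

The upshot of the hook-based design is that the generated robots.txt never needs the backup-and-rewrite dance the deleted _fix_robots_txt performed; the file is simply regenerated from the template each time checksetup.pl runs.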