From 5a135a8ef7433ccb6093116e9a106263c85996f8 Mon Sep 17 00:00:00 2001
From: "bugreport%peshkin.net" <>
Date: Wed, 28 Sep 2005 00:16:55 +0000
Subject: Bug 307602: Smooth attach_data upgrade for sites with huge
 attachment tables

Patch by Joel Peshkin r=mkanat, a=justdave
---
 Bugzilla/DB/Mysql.pm | 10 ++++++++++
 checksetup.pl        | 15 ++-------------
 2 files changed, 12 insertions(+), 13 deletions(-)

diff --git a/Bugzilla/DB/Mysql.pm b/Bugzilla/DB/Mysql.pm
index c3824ab9a..952d49ff9 100644
--- a/Bugzilla/DB/Mysql.pm
+++ b/Bugzilla/DB/Mysql.pm
@@ -490,6 +490,16 @@ sub bz_setup_database {
                              {TYPE => 'DATETIME', NOTNULL => 1});
     }
 
+    # 2005-09-24 - bugreport@peshkin.net, bug 307602
+    # Make sure that default 4G table limit is overridden
+    my $row = $self->selectrow_hashref("SHOW TABLE STATUS LIKE 'attach_data'");
+    if ($$row{'Create_options'} !~ /MAX_ROWS/i) {
+        print "Converting attach_data maximum size to 100G...\n";
+        $self->do("ALTER TABLE attach_data
+                   AVG_ROW_LENGTH=1000000,
+                   MAX_ROWS=100000");
+    }
+
 }
 
 
diff --git a/checksetup.pl b/checksetup.pl
index 3e84e19d3..3f4a68b78 100755
--- a/checksetup.pl
+++ b/checksetup.pl
@@ -4019,19 +4019,8 @@ $dbh->bz_add_index('attachments', 'attachments_submitter_id_idx',
 if ($dbh->bz_column_info("attachments", "thedata")) {
     print "Migrating attachment data to its own table...\n";
     print "(This may take a very long time)\n";
-    my $sth_get1 = $dbh->prepare("SELECT attach_id
-                                  FROM attachments");
-    my $sth_get2 = $dbh->prepare("SELECT thedata
-                                  FROM attachments WHERE attach_id = ?");
-    $sth_get1->execute();
-    while (my ($id) = $sth_get1->fetchrow_array) {
-        $sth_get2->execute($id);
-        my ($thedata) = $sth_get2->fetchrow_array;
-        my $sth_put = $dbh->prepare("INSERT INTO attach_data
-                                     (id, thedata) VALUES ($id, ?)");
-        $sth_put->bind_param(1, $thedata, $dbh->BLOB_TYPE);
-        $sth_put->execute();
-    }
+    $dbh->do("INSERT INTO attach_data (id, thedata)
+              SELECT attach_id, thedata FROM attachments");
     $dbh->bz_drop_column("attachments", "thedata");
 }
 
-- 
cgit v1.2.3-24-g4f1b
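
Why the ALTER in the Mysql.pm hunk works: MyISAM caps a table at roughly 4 GB
by default because the internal data pointer is sized from the table's
expected size, which MySQL estimates as MAX_ROWS * AVG_ROW_LENGTH. Declaring
AVG_ROW_LENGTH=1000000 and MAX_ROWS=100000 tells MySQL to plan for
100,000 * 1,000,000 bytes, about 100 GB, hence the message the patch prints.
The sketch below is illustrative only and not part of the patch: it assumes a
MyISAM attach_data table, and the database name and credentials are
placeholders. It shows the same check-then-alter logic against a bare DBI
handle:

    #!/usr/bin/perl -w
    # Minimal sketch, not part of the patch: raise a MyISAM table's size
    # cap the same way the bz_setup_database() hunk above does.
    use strict;
    use DBI;

    # Placeholder connection details; a real deployment reads these
    # from localconfig.
    my $dbh = DBI->connect('dbi:mysql:database=bugs', 'bugs', 'password',
                           { RaiseError => 1 });

    # Create_options records any non-default table options, so a table
    # that was already converted is skipped on later runs.
    my $row = $dbh->selectrow_hashref(
        "SHOW TABLE STATUS LIKE 'attach_data'");
    if ($row && ($row->{Create_options} || '') !~ /MAX_ROWS/i) {
        # MySQL sizes the data pointer for MAX_ROWS * AVG_ROW_LENGTH
        # bytes: 100,000 * 1,000,000 = 10^11, roughly 100 GB.
        $dbh->do("ALTER TABLE attach_data
                  AVG_ROW_LENGTH=1000000,
                  MAX_ROWS=100000");
    }

    # Max_data_length reflects the new cap after the ALTER.
    $row = $dbh->selectrow_hashref(
        "SHOW TABLE STATUS LIKE 'attach_data'");
    print "attach_data can now hold $row->{Max_data_length} bytes\n";

    $dbh->disconnect;

The checksetup.pl half of the patch shares the same motivation: the old
migration pulled every attachment through the Perl client one row at a time,
while the replacement's single server-side INSERT ... SELECT copies the BLOBs
without them ever leaving the server, which is what makes the upgrade
"smooth" for sites with huge attachment tables.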