From 89de356424ce5717e6b5e0dbbd65609b84df40a0 Mon Sep 17 00:00:00 2001
From: "gerv%gerv.net" <>
Date: Thu, 22 Jan 2004 16:10:53 +0000
Subject: Bug 227155 - make sure running collectstats.pl twice in a day, or
 migrating data from old charts to new charts where there are duplicate
 entries, doesn't cause an SQL error. Patch by gerv; r,a=justdave.

---
 checksetup.pl | 8 ++++++++
 1 file changed, 8 insertions(+)

(limited to 'checksetup.pl')

diff --git a/checksetup.pl b/checksetup.pl
index 1d6240867..255fc56e0 100755
--- a/checksetup.pl
+++ b/checksetup.pl
@@ -3707,6 +3707,9 @@ if (!$series_exists) {
                                     "(series_id, date, value) " .
                                     "VALUES (?, ?, ?)");
 
+    my $deletesth = $dbh->prepare("DELETE FROM series_data
+                                   WHERE series_id = ? AND date = ?");
+
     # Fields in the data file (matches the current collectstats.pl)
     my @statuses = qw(NEW ASSIGNED REOPENED UNCONFIRMED RESOLVED VERIFIED
                       CLOSED);
@@ -3786,6 +3789,11 @@ if (!$series_exists) {
             # Insert values into series_data: series_id, date, value
             my %fielddata = %{$data{$field}};
             foreach my $date (keys %fielddata) {
+                # We need to delete in case the text file had duplicate entries
+                # in it.
+                $deletesth->execute($seriesids{$field},
+                                    $dbh->quote($date));
+
                 # We prepared this above
                 $seriesdatasth->execute($seriesids{$field},
                                         $dbh->quote($date),
-- 
cgit v1.2.3-24-g4f1b