#!/usr/bin/perl -w
#
# Copyright (c) 2005-2011 University of Utah and the Flux Group.
#
# {{{EMULAB-LICENSE
#
# This file is part of the Emulab network testbed software.
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at
# your option) any later version.
#
# This file is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this file. If not, see <http://www.gnu.org/licenses/>.
#
# }}}
#
use strict;
use English;
use Getopt::Std;

#
# Dump new error log entries into the error log archive database
# (and, at Utah, copy them over to the ops node as well).
#
sub usage()
{
    print STDOUT "Usage: dumperrorlog [-d] [-f]\n";
    exit(-1);
}
my $optlist        = "df";
my $debug          = 0;
my $force          = 0;
my $tempfile       = "/var/tmp/testbed_dumperrorlog_tempfile";
my $keep_important = 4 * 24 * 60 * 60;	# Keep important entries 4 days.
my $keep_any       = 2 * 24 * 60 * 60;	# Keep everything else 2 days.
my $max_rows       = 300000;		# Copy at most this many rows per run.

#
# Configure variables
#
my $TB           = "@prefix@";
my $TBOPS        = "@TBOPSEMAIL@";
my $ISUTAHEMULAB = "@OURDOMAIN@" eq 'emulab.net';
my $USERNODE     = "@USERNODE@";
my $PROXY        = "$TB/sbin/dumperrorlog.proxy";
my $SCP          = "/usr/bin/scp";
my $SSHTB        = "$TB/bin/sshtb";
my $ERRORLOGDB   = "@TBERRORLOGDBNAME@";

# un-taint path
$ENV{'PATH'} = '/bin:/usr/bin:/usr/local/bin:/usr/site/bin';
delete @ENV{'IFS', 'CDPATH', 'ENV', 'BASH_ENV'};

#
# Turn off line buffering on output
#
$| = 1;

# Load the Testbed support stuff.
use lib "@prefix@/lib";
use libdb;
use libtestbed;

# Protos
sub fatal($);

#
# Only real root can call this.
#
if ($UID != 0) {
    print STDERR "You must be root to run this script!\n";
    exit(-1);
}

#
# Only one instance of this script can run at a time; for now just
# abort instead of waiting.
#
my $lock_res = TBScriptLock("dumperrorlog", 1, 0);
#exit(0) if $lock_res == TBSCRIPTLOCK_IGNORE();
exit(-1) if $lock_res != TBSCRIPTLOCK_OKAY();

#
# Form a temp name.
#
my $logname = TBMakeLogname("dumperrorlog");

#
# Parse command arguments. Once we return from getopts, all that should
# be left are the required arguments.
#
my %options = ();
if (! getopts($optlist, \%options)) {
    usage();
}
if (@ARGV) {
    usage();
}
if (defined($options{"d"})) {
    $debug++;
}
if (defined($options{"f"})) {
    $force++;
}

#
# Reopen both stdout and stderr so that we can record all the output for
# later mailing.
#
if (! $debug) {
    open(STDERR, ">> $logname") or die("opening $logname for STDERR: $!");
    open(STDOUT, ">> $logname") or die("opening $logname for STDOUT: $!");
}

#
# Get the last log index we archived out.
#
my $query_result =
    DBQueryFatal("select idx from emulab_indicies ".
		 "where name='lastlog_seqnum'");
my ($minseq) = $query_result->fetchrow_array();
if (!defined($minseq)) {
    DBQueryFatal("insert into emulab_indicies (name, idx) ".
		 "values ('lastlog_seqnum', 0)");
    $minseq = 0;
}

#
# And the last experiment index we sent over.
#
$query_result =
    DBQueryFatal("select idx from emulab_indicies ".
		 "where name='lastlog_exptidx'");
my ($exptidx_min) = $query_result->fetchrow_array();
if (!defined($exptidx_min)) {
    DBQueryFatal("insert into emulab_indicies (name, idx) ".
		 "values ('lastlog_exptidx', 0)");
    $exptidx_min = 0;
}
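#
# Note: the log and experiment_stats copies below treat the stored
# indices as high-water marks, selecting rows in half-open (min,max]
# intervals, so a row whose index equals the stored value has already
# been archived. The per-session tables use an inclusive lower bound;
# the "load data ... replace" makes any re-copied rows harmless.
#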
# And the current largest one.
$query_result =
    DBQueryFatal("select MAX(exptidx) from experiment_stats");
my ($exptidx_max) = $query_result->fetchrow_array();
if (!defined($exptidx_max)) {
    fatal("Could not get largest experiment index!");
}

#
# Grab the index number for the NOTICE priority.
#
$query_result =
    DBQueryFatal("select priority from priorities ".
		 "where priority_name='NOTICE'");
my ($NOTICE) = $query_result->fetchrow_array();
if (!defined($NOTICE)) {
    fatal("No priority named 'NOTICE' in the priorities table!");
}

#
# Now get the largest log index to copy.
#
$query_result =
    DBQueryFatal("select MAX(seq) from log ".
		 "where seq > $minseq ".
		 "  and seq <= $minseq + $max_rows");
my $maxseq = $query_result->fetchrow_array();
$maxseq = $minseq
    unless defined($maxseq);

print "Copying log entries ($minseq,$maxseq]\n";
print "Exptidx ($exptidx_min, $exptidx_max]\n";

#
# Might not be any new entries ...
#
if ($minseq != $maxseq) {
    #
    # Backup old outfile.
    #
    if (-e "${tempfile}.log") {
	unlink("${tempfile}.log.save")
	    if (-e "${tempfile}.log.save");
	system("/bin/mv -f ${tempfile}.log ${tempfile}.log.save");
    }

    #
    # Grab all of the new entries and store to the file.
    #
    DBQueryFatal("select * from log ".
		 "where seq > $minseq ".
		 "  and seq <= $maxseq ".
		 "into outfile '${tempfile}.log'");

    system("echo \"load data infile '${tempfile}.log' ".
	   "replace into table log\" | mysql $ERRORLOGDB");
    fatal("Could not load into archive DB!")
	if ($?);

    if ($ISUTAHEMULAB) {
	# XXX This awful hack will go away when Kevin is happy with
	# the error logging code.
	#
	# Copy the file over to ops and load it into the DB over there.
	#
	system("$SCP -p ${tempfile}.log ".
	       "  ${USERNODE}:/var/db/mysql/$ERRORLOGDB") == 0
	    or fatal("Could not copy $tempfile to $USERNODE!");

	system("$SSHTB -host $USERNODE $PROXY log") == 0
	    or fatal("Could not load log entries into ops DB!");
    }
}

#
# Now the other tables Kevin needs.
#
foreach my $tablename ("scripts", "priorities", "causes", "session_info",
		       "errors", "users", "experiments",
		       "report_error", "report_context",
		       "report_assign_violation") {
    unlink("${tempfile}.${tablename}")
	if (-e "${tempfile}.${tablename}");

    if (($tablename eq "errors" || $tablename eq "session_info") &&
	!$force) {
	DBQueryFatal("select * from $tablename ".
		     "where session>=$minseq and session<=$maxseq ".
		     "into outfile '${tempfile}.${tablename}'");
    }
    elsif ($tablename eq "report_error" ||
	   $tablename eq "report_context" ||
	   $tablename eq "report_assign_violation") {
	DBQueryFatal("select * from $tablename ".
		     "where seq>=$minseq and seq<=$maxseq ".
		     "into outfile '${tempfile}.${tablename}'");
    }
    elsif ($tablename eq "users") {
	DBQueryFatal("select uid,unix_uid from $tablename ".
		     "into outfile '${tempfile}.${tablename}'");
    }
    elsif ($tablename eq "experiments") {
	next
	    if ($exptidx_max == $exptidx_min);

	DBQueryFatal("select exptidx,pid,eid from experiment_stats ".
		     "where exptidx > $exptidx_min and ".
		     "      exptidx <= $exptidx_max ".
		     "into outfile '${tempfile}.${tablename}'");
    }
    else {
	DBQueryFatal("select * from $tablename ".
		     "into outfile '${tempfile}.${tablename}'");
    }

    system("echo \"load data infile '${tempfile}.${tablename}' ".
	   "replace into table $tablename\" | mysql $ERRORLOGDB");
    fatal("Could not load $tablename into archive DB!")
	if ($?);

    if ($ISUTAHEMULAB) {
	#
	# Copy the file over to ops and load it into the DB over there.
	#
	system("$SCP -p ${tempfile}.${tablename} ".
" ${USERNODE}:/var/db/mysql/$ERRORLOGDB") == 0 or fatal("Could not copy ${tempfile}.${tablename} to $USERNODE!"); system("$SSHTB -host $USERNODE $PROXY $tablename") == 0 or fatal("Could not load $tablename entries into ops DB!"); unlink("${tempfile}.${tablename}"); } } # Okay, now that we have really archived them away, update the # seq number. DBQueryFatal("update emulab_indicies set idx='$maxseq' ". "where name='lastlog_seqnum'"); # Okay, now that we have really sent over the experiment_stats stuff. DBQueryFatal("update emulab_indicies set idx='$exptidx_max' ". "where name='lastlog_exptidx'"); # # And then age out entries. # DBQueryFatal("delete from log ". "where stamp < (UNIX_TIMESTAMP(now()) - $keep_important)". " and seq <= $maxseq"); # # And age out non critical data. # DBQueryFatal("delete from log ". "where priority > $NOTICE and ". " stamp < (UNIX_TIMESTAMP(now()) - $keep_any)". " and seq <= $maxseq"); unlink("$logname") if (-e $logname); TBScriptUnlock(); exit(0); sub fatal($) { my ($msg) = @_; SENDMAIL($TBOPS, "DumpErrorLog Failed", $msg, undef, undef, ($logname)); unlink("$logname") if (-e $logname); die("*** $0:\n". " $msg\n"); }