Some shell scripts - mostly here so I can reuse sections
Collect sar stats from a list of machines and process them with kSar
#!/usr/bin/ksh
# purpose : given a list of machines harvest sar stats for that day
# uses a list of machines (one per line) to process sar - helps if ssh keys are set for access
typeset SAR="/usr/bin/sar -A"
typeset DATE=`date +%d%b%y`   # e.g. 01Jan09 - one sar file per day
# storage location
typeset ARCHIVE=/export/home/greid/archive
typeset PROCESS=/export/home/greid/public_html
typeset MACH_LIST=/export/home/greid/harvest_list
typeset KSAR=/export/home/greid/kSar-4.0.14.jar
typeset KSARG="\" SolariscpuSar SolarisRqueueSar SolarisSqueueSar SolarisbufferSar SolarisswapingSar SolarissyscalSar SolarisfileSar SolaristtySar SolarismsgSar SolarispagingSar2 SolarispagingSar1 SolarismemSar SolariskmasmlSar SolariskmalgSar SolariskmaovzSar \""
#typeset KSARG="\" SolariscpuSar SolarissyscalSar SolarismemSar \""
# create the dirs if they do not exist
if [ ! -d $ARCHIVE ];then
    echo "Create Archive"
    mkdir $ARCHIVE
fi
if [ ! -d $PROCESS ];then
    echo "building process dir"
    mkdir $PROCESS
    if [ $? != 0 ];then
        echo "problem creating $PROCESS, aborting"
        exit 1
    fi
fi
if [ -f $PROCESS/index2.html ];then
    rm $PROCESS/index2.html
fi
# cat <<EOF > $PROCESS/index.html
# <HTML>
# <HEAD>
# <title>
# Some Sar Graphs
# </TITLE>
# <BODY>
# <h1> Some Sar Graphs from $DATE</h1>
# <p></p>
# EOF
while read machine
do
    if [ ! -d $ARCHIVE/$machine ];then
        echo "creating output dir"
        mkdir $ARCHIVE/$machine
    fi
    typeset machine_index="${machine}_index.html"
    typeset seven_index="${machine}_7day_index.html"
    ssh $machine "$SAR" < /dev/null > $ARCHIVE/$machine/$DATE.sar
    if [ $? != 0 ];then
        echo "problem getting sar from $machine, aborting"
        exit 1
    fi
    # clean up the old 7day sar file
    if [ -f $ARCHIVE/$machine/7day.sar ];then
        rm $ARCHIVE/$machine/7day.sar
    fi
    if [ -d $PROCESS/$machine ];then
        rm -rf $PROCESS/$machine
    fi
    mkdir $PROCESS/$machine
    # build a new one holding a week's worth of sar output
    find $ARCHIVE/$machine/* -ctime -7 -exec cat {} >> $ARCHIVE/$machine/7day.sar \;
    java -jar $KSAR -input $ARCHIVE/$machine/$DATE.sar -graph "$KSARG" -addHTML -outputPNG $PROCESS/$machine/$machine -width 400 -height 300 -showTrigger
    java -jar $KSAR -input $ARCHIVE/$machine/7day.sar -graph "$KSARG" -addHTML -outputPNG $PROCESS/$machine/${machine}_7day -width 400 -height 300 -showTrigger
    #echo "DEBUG - $machine , $machine_index ,$seven_index"
    echo "<a href=\"$machine/$machine_index\"> $machine </a><br>" >> $PROCESS/index2.html
    echo "<a href=\"$machine/$seven_index\"> $machine - 7day </a><br>" >> $PROCESS/index2.html
done < $MACH_LIST
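The harvest_list file that feeds the loop is just one hostname per line, nothing else. A made-up example (hostnames are hypothetical):
web01
web02
db01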
File checksum db tool
#!/usr/bin/perl
use strict;
use File::Path;
use File::Basename;
use Storable;
use Digest::SHA1;
my %chksums;
my $CHKSUMDB="/usr/local/lib/script_check/cksumdb.dat";
my $FILELIST="/usr/local/etc/script_check_list";
my $ARCHIVE="/usr/local/lib/script_check/script_archive";
#set up checks
if ( ! -f $FILELIST )
{
print "No list to check\n";
exit 1;
}
if ( ! -d $ARCHIVE )
{
print "No Archive Dir , building it\n";
mkdir $ARCHIVE or die "cant make dir";
}
if ( ! -f $CHKSUMDB)
{
print "\nbuilding flat hash table on disk";
$chksums{"snafu"} = "snafu";
store(\%chksums,$CHKSUMDB);
}
#load hash from disk
%chksums = %{retrieve($CHKSUMDB) } or die "cant open checksum database";
open(SCRIPTS,$FILELIST)
or die "cant open list file";
while(<SCRIPTS>)
{
my ($machine,$filename) = split;
# fetch the script from the machine using vanilla scp
#and place into the archive dir.
if ( ! -d "$ARCHIVE/$machine")
{
mkdir "$ARCHIVE/$machine" or die "wft";
}
my $dirname=dirname($filename);
my $fpath="$ARCHIVE/$machine$filename";
if (! -d "$ARCHIVE/$machine$dirname")
{
print "\nno path , building it\n";
mkpath("$ARCHIVE/$machine$dirname") or die " can't build path";
}
if ( -f "$ARCHIVE/$machine$filename")
{
rename("$ARCHIVE/$machine$filename","$fpath.prev");
}
my $cmd="/usr/bin/scp $machine:$filename $fpath";
my $res = `$cmd`;
# now we have the file check it .
my $hkey="$machine:$fpath";
open (FILE,$fpath) or die "cant open $fpath";
binmode(FILE);
my $cksum = Digest::SHA1->new;
while(<FILE>)
{
$cksum->add($_);
}
if (exists $chksums{"$hkey"})
{
if ($cksum->hexdigest eq $chksums{"$hkey"})
{
print "$machine:$filename has not changed\n";
unlink("$fpath.prev");
}
else
{
print "$machine:$filename has changed - Raise Sev 1 Ticket\n";
my @diff = `/usr/bin/diff $fpath $fpath.prev`;
print @diff;
}
}
else
{
#its not in the hash so add it to the list!
$chksums{"$hkey"} = $cksum->hexdigest;
}
# unlink ("$fpath.prev");
}
# close the files and flush the hash back to disk
close SCRIPTS;
store(\%chksums,$CHKSUMDB);
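Both this script and the build tool below read the same /usr/local/etc/script_check_list; each line is a machine name and an absolute path, separated by whitespace. An illustrative list (hosts and paths made up):
web01 /usr/local/bin/rotate_logs.sh
db01 /etc/init.d/appserver
web02 /usr/local/bin/cleanup.ksh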
Checksum db build tool
#!/usr/bin/perl
# use this script to rebuild the hash table for the check_script.pl
use strict;
use File::Path;
use File::Basename;
use Storable;
use Digest::SHA1;
my %chksums;
my $CHKSUMDB="/usr/local/lib/script_check/cksumdb.dat";
my $FILELIST="/usr/local/etc/script_check_list";
my $ARCHIVE="/usr/local/lib/script_check/script_archive";
#set up checks
if ( ! -f $FILELIST )
{
print "No list to check\n";
exit 1;
}
if ( ! -d $ARCHIVE )
{
print "No Archive Dir , building it\n";
mkdir $ARCHIVE or die "cant make dir";
}
if ( ! -f $CHKSUMDB)
{
print "\nbuilding flat hash table on disk";
$chksums{"snafu"} = "snafu";
store(\%chksums,$CHKSUMDB);
}
#load hash from disk
%chksums = %{retrieve($CHKSUMDB) } or die "cant open checksum database";
open(SCRIPTS,$FILELIST)
or die "cant open list file";
while(<SCRIPTS>)
{
my ($machine,$filename) = split;
# fetch the script from the machine using vanilla scp
#and place into the archive dir.
if ( ! -d "$ARCHIVE/$machine")
{
mkdir "$ARCHIVE/$machine" or die "wft";
}
my $dirname=dirname($filename);
my $fpath="$ARCHIVE/$machine$filename";
if (! -d "$ARCHIVE/$machine$dirname")
{
print "\nno path , building it\n";
mkpath("$ARCHIVE/$machine$dirname") or die " can't build path";
}
if ( -f "$fpath.prev")
{
unlink("$fpath.prev");
}
if ( -f "$ARCHIVE/$machine$filename")
{
rename("$ARCHIVE/$machine$filename","$fpath.prev");
}
my $cmd="/usr/bin/scp $machine:$filename $fpath";
my $res = `$cmd`;
# now we have the file check it .
my $hkey="$machine:$fpath";
open (FILE,$fpath) or die "cant open $fpath";
binmode(FILE);
my $cksum = Digest::SHA1->new;
while(<FILE>)
{
$cksum->add($_);
}
$chksums{"$hkey"} = $cksum->hexdigest;
#unlink ("$fpath");
}
while ( my ($k,$v) = each %chksums )
{
print "$k => $v\n";
}
# close the files and flush the hash back to disk
close SCRIPTS;
store(\%chksums,$CHKSUMDB);
__END__
=head1 NAME
build_chksumdb.pl
=head1 SYNOPSIS
Given a list of files in /usr/local/etc/script_check_list
it will copy each file to an archive, compute a SHA1 checksum
and store it in the checksum hash database.
Note: "snafu" is a key in the hash database; it is used internally and is unlikely to cause a namespace issue.
If the database does not exist it will be (re)created by this script.
=head1 FILES
Database :
/usr/local/lib/script_check/cksumdb.dat
Script list:
/usr/local/etc/script_check_list
=head1 SEE ALSO
script_check.pl
=cut
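A typical way to run the pair (the install path and schedule here are assumptions, not part of the scripts): seed the database once with build_chksumdb.pl, then let cron run the checker and mail whatever it prints, e.g.
0 6 * * * /usr/local/bin/script_check.pl | mailx -s "script check" sysadmin
Anything flagged as changed then turns up in the morning mail.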
Dump NetWorker clients and spit them into a .xls
#!/usr/bin/perl -w
use strict;
use IPC::Open2;
use Spreadsheet::WriteExcel;
#my $nsradmin = 'nsradmin';
my $nsradmin = '/usr/bin/nsradmin';
my ($rdr,$wtr);
#my $pid = open2($rdr,$wtr,$nsradmin, '-s mtxeaxbck53', '-i' , '-');
my $pid = open2($rdr,$wtr,$nsradmin, '-i' , '-');
print $wtr "show name\n";
print $wtr "show networker version\n";
print $wtr "show client OS type\n";
print $wtr "print type:nsr client\n";
close $wtr;
my ($row, $col);
my $workbook = Spreadsheet::WriteExcel->new('nw_client_license.xls')or die "Can\'t Write to spreadsheet file";
my $worksheet = $workbook->add_worksheet("Networker Clients");
my $hformat = $workbook->add_format(
bg_color => 57,
bold => 1,
align => 1
);
$col = $row = 0;
$worksheet->write($row, $col, "Networker Client license Listing",$hformat);
$row++;
$worksheet->write($row, $col, "Host Name",$hformat);
$worksheet->write($row, $col+1, "Host Type",$hformat);
$worksheet->write($row, $col+2, "Version",$hformat);
$row++;
$worksheet->set_column('A:A', 25);
$worksheet->set_column('B:B', 30);
$worksheet->set_column('C:C', 30);
#Set text format up
my $format = $workbook->add_format(
bg_color => 9,
bold => 1,
align => 1,
border => 1
);
$format->set_bg_color();
$format->set_bold(0);
$format->set_color();
$worksheet->write_comment(0, 0, 'Autogen xls report, formatting may be wobbly');
my %Hosts;
#data comes in as hostname,os level , client version <CR>
while (<$rdr>)
{
my $host;
chomp;
if (s/\\$//) # ends in backslash
{
$_ .= <$rdr>;
chomp;
redo;
}
if (/name\:\s*(.*);/) { $host=$1; next; };
if (/type:\s*(.*);/) { $Hosts{$host}[0] = $1;next}
if (/version\:\s*(.*);/) { $Hosts{$host}[1] = $1; }
#next unless s/^(.*?),\s*//;
#abuse a hash of arrays to ensure the hostname is unique ;)
#$Hosts{"$1"} = [ split ','];
}
for my $host ( keys %Hosts)
{
$worksheet->write($row, $col, $host,$format);
$worksheet->write($row, $col+1, $Hosts{$host}[0],$format);
$worksheet->write($row, $col+2, $Hosts{$host}[1],$format);
$row++;
}
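For reference, the parse loop keys off the attribute lines nsradmin prints for each client resource, which look roughly like this (hostname and version made up, spacing varies):
name: backupclient01.example.com;
client OS type: SolarisOS;
NetWorker version: 7.6.1;
Anything that doesn't match one of the three regexes is simply ignored.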
Read a SunONE WS config and report whether the sites are up
#!/usr/bin/env perl
# Read the server.xml file and do an HTTP GET on each virtual server URL
use strict;
use warnings;
use XML::Simple;
use LWP::Simple qw(get);
# set $xmlfile to a local file if required - XMLin will cope ;)
my $xmlfile = `ssh server "cat /opt/SUNWwbsvr/https-server.emea.domain.com/config/server.xml"`;
my $xs1 = XML::Simple->new();
my $doc = $xs1->XMLin($xmlfile);
my $exitCode = 0;
foreach my $key (keys (%{$doc->{VSCLASS}->{VS}}))
{
my $url="http://$doc->{VSCLASS}->{VS}->{$key}->{urlhosts}";
print "$key - $url - ";
if ( get ($url)){ print "OK\n"} else {print "ERROR\n" ; $exitCode=1;};
}
exit $exitCode;
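Output is one line per virtual server in the config, something like (names hypothetical):
https-foo - http://foo.emea.domain.com - OK
https-bar - http://bar.emea.domain.com - ERROR
The non-zero exit code on any failure means it can sit straight behind a monitoring check.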
Full system ufsdump
#!/usr/bin/ksh
# pass the rmt number to the script or it will default to 0
exec 3>&1
exec 1> /var/adm/dumplog 2>&1
FILESYSTEMS=`df -F ufs | awk ' { printf("%s ",$1) } '`
DUMPDEVICE=/dev/rmt/${1:-0}n
mt -f $DUMPDEVICE rewind
if mt -f $DUMPDEVICE status > /dev/null
then
    for FS in $FILESYSTEMS
    do
        /usr/sbin/ufsdump 0uf $DUMPDEVICE $FS
    done
fi
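Usage is just the script with an optional rmt unit number; a hypothetical invocation (the script name is assumed):
./fulldump.ksh 1   # dump all mounted ufs filesystems to /dev/rmt/1n
./fulldump.ksh     # defaults to /dev/rmt/0n
Output goes to /var/adm/dumplog rather than the terminal.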