"Fossies" - the Fresh Open Source Software Archive

Member "websec-1.9.0/websec" (20 Jan 2006, 23337 Bytes) of package /linux/www/old/websec-1.9.0.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Perl source code syntax highlighting (style: standard) with prefixed line numbers and code folding option. Alternatively you can here view or download the uninterpreted source code file.

#!/usr/bin/perl -w

#################################################################################
#
# Web Secretary
#
# Retrieves a list of web pages and sends the pages via email to
# a designated recipient. It can optionally compare the page with a
# previously retrieved page, highlight the differences and send the
# modified page to the recipient instead.
#
# Copyright (C) 1998  Chew Wei Yih
# Copyright (C) 2004,2005 Baruch Even <baruch@ev-en.org>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA  02111-1307, USA.
#
#################################################################################

my $feature_compress = 1;

use HTTP::Status;
use HTTP::Date;
use LWP::UserAgent;
use URI;
eval { require Compress::Zlib; } or $feature_compress=0;
use POSIX qw(strftime);
use File::Spec;
use Getopt::Long;
use Pod::Usage;
use File::Temp qw/tempfile/;
use File::Copy;

my $version = "1.9.0";

# Print introduction
print "Web Secretary Ver $version\n";
print "By Chew Wei Yih Copyleft (c) 1998\n\n";

# Initialize parameters

$help = 0;
$man  = 0;
$urllist = "url.list";
$base = "";

# Parse command line options
GetOptions(
    "urllist=s" => \$urllist,
    "help|?" => \$help,
    "man"    => \$man,
    "base=s" => \$base
);

pod2usage(1) if $help;
pod2usage( -exitstatus => 0, -verbose => 2 ) if $man;

if ($base eq "") {
    if ( -e $urllist ) {
        $base = ".";
    } else {
        $base = $ENV{HOME} . "/.websec";
    }
}

# Remove trailing slash from base, we will add it ourself everywhere needed
$base =~ s/\/$//;

# Prepare pathnames.
$archive = "$base/archive";
mkdir $base,    0750 if !-d $base;
mkdir $archive, 0750 if !-d $archive;
($current_fh, $page_current) = tempfile(DIR=>"$base", UNLINK=>1, SUFFIX=>".html");
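# Only the name of the temporary file is used from here on; the handle is
# discarded below (UNLINK => 1 removes the file itself at program exit).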
$current_fh=-1;

# Red Hat has sendmail in /usr/sbin/sendmail, but LWP::Protocol::mailto
# expects it in /usr/lib/sendmail.  Revert to the Red Hat location if there
# is no executable at the expected location.
use vars qw($LWP::Protocol::mailto::SENDMAIL);
$LWP::Protocol::mailto::SENDMAIL = "/usr/sbin/sendmail"
  unless -x "/usr/lib/sendmail";

# Location of webdiff, if it's in the same directory as websec, use it,
# this enables simply opening the archive and using the program inplace.
use FindBin;
$webdiffbin = "$FindBin::Bin/webdiff";
if ( !-e $webdiffbin ) {
    $webdiffbin = "webdiff";
}

$htmldiffbin = "$FindBin::Bin/htmldiff";
if ( !-e $htmldiffbin ) {
  $htmldiffbin = "./htmldiff";
  if ( !-e $htmldiffbin ) {
      $htmldiffbin = "htmldiff";
  }
}

# prepare digest
@digest = ();
@htmldigest = ();

# Set default values
local %defaults = (
    url        => "",
    auth       => "none",
    name       => "",
    prefix     => "",
    diff       => "webdiff",
    hicolor    => "blue",
    asciimarker => 0,
    ignore     => "none",
    ignoreurl  => "none",
    email      => "",
    emaillink  => "",
    emailerror => 1,
    program    => "",
    programdigest => "",
    proxy      => "",
    proxyauth  => "none",
    randomwait => 0,
    retry      => 3,
    retrywait  => 0,
    timeout    => 20,
    tmin       => 0,
    tmax       => 99999,
    addsubject => "",
    digest     => "false",
    useragent  => "WebSec/$version",
    datefmt    => " - %d %B %Y (%a)",
    mailfrom   => "",
);
%siteinfo = %defaults;

# Default return code
$rc = 0;

my $urllistfile = $base . "/" . "$urllist";
if (! -e $urllistfile) {
    print STDERR "Missing config file $urllistfile, Exiting.\n";
    exit 1;
}

open ARGV, "$urllistfile" unless exists $ARGV[0];

# Loop through input file and process all sites listed
while (<>) {
    chomp $_;
    s/^\s*//;
    s/\s*$//;

    # Ignore comments
    next if (m/^#/);
    # Stop with a finish marker
    last if (m/^__END__/);

    # Handle non-empty lines
    if ( length != 0 ) {
        $rc = &HandleInput();
        if ( $rc != 0 ) { last; }
        next;
    }

    # Handle line separators
    $rc = &HandleSite();
    if ( $rc != 0 ) { last; }
    %siteinfo = %defaults;
}

# Process last site if available
if ( $rc == 0 && $siteinfo{url} ne "" ) { $rc = &HandleSite(); }

# Delete temp files
unlink($page_current);

if (@digest) {
    $linkmsg =
      "The contents of the following URLs have changed:\n\n"
      . join ( "\n", @digest ) . "\n";
    $subj = "$addsubject$today";
    &MailMessage( $linkmsg, $subj, $digestEmail, $siteinfo{mailfrom} );
}

if (@htmldigest) {
    ($OUTPAGE, $pagename) = tempfile(DIR=>"$base", UNLINK=>1, SUFFIX=>".html");
    print OUTPAGE "<HTML><BODY>The contents of the following URLs have changed:<P><P>";
    foreach (@htmldigest) { print OUTPAGE "$_<P>\n"; }
    print OUTPAGE "<P></BODY></HTML>";
    close(OUTPAGE);

    &ShowDocument( $program, $pagename);
}


# End of main program
exit $rc;

# Handle setting of parameters
# Params: none
sub HandleInput() {

    # Get keyword, value pair
    ( $keyword, $value ) = split ( /=/, $_, 2 );

    if (not defined $value) {
        print STDERR "Keyword '$keyword' has no value.\n";
        exit 1;
    }

    $keyword =~ s/^\s*(.*?)\s*$/$1/;
    $keyword =~ tr/A-Z/a-z/;
    $value   =~ s/^\s*\"?(.*?)\"?\s*$/$1/;

    # Check if valid keyword
    if ( not defined $defaults{$keyword} ) {
        print qq(Unrecognized keyword in line $.: "$_". Keyword="$keyword".\n);
        return -1;
    }

    # Allow values from the environment
    while ($value =~ m/\${([^}]+)}/) {
        if (not exists $ENV{$1}) {
            print STDERR "Used environment variable '$1' but it is not defined, aborting.\n";
            exit 1;
        }
        $value =~ s/\${([^}]+)}/$ENV{$1}/;
    }

    $siteinfo{$keyword} = $value;
    return 0;
}

# Handle downloading, highlighting and mailing of each site.
# Params: none
# Returns: 0 => OK, -1 => Error
sub HandleSite() {

    # Get parameter values for this page
    $url        = $siteinfo{url};
    $name       = $siteinfo{name};
    $prefix     = $siteinfo{prefix};
    $diff       = $siteinfo{diff};
    $hicolor    = $siteinfo{hicolor};
    $ignore     = $siteinfo{ignore};
    $ignoreurl  = $siteinfo{ignoreurl};
    $email      = $siteinfo{email};
    $emailLink  = $siteinfo{emaillink};
    $program    = $siteinfo{program};
    $programdigest = $siteinfo{programdigest};
    $proxy      = $siteinfo{proxy};
    $randomwait = $siteinfo{randomwait};
    $retry      = $siteinfo{retry};
    $retrywait  = $siteinfo{retrywait};
    $timeout    = $siteinfo{timeout};
    $tmin       = $siteinfo{tmin};
    $tmax       = $siteinfo{tmax};
    $addsubject = $siteinfo{addsubject};
    $digest     = $siteinfo{digest};
    $useragent  = $siteinfo{useragent};
    $datefmt    = $siteinfo{datefmt};

    # Get today's date in the format we want.
    $today = strftime $datefmt, localtime;

    # If block without URL, assume parameter setting block and update default
    # values
    if ( $url eq "" ) {
        %defaults = %siteinfo;
        return 0;
    }

    # If essential parameters are not present, abort with error
    if ( $name eq ""
        || $prefix eq ""
        || ( $email eq "" && $emailLink eq "" && $program eq "" ) )
    {
        print "Name, prefix, program or email info missing from URL: $url.\n";
        return -1;
    }

    # Prepare for downloading this page
    print "Processing => $url ($name) ...\n";
    $pagebase            = "$archive/$prefix";
    $page_previous       = "$pagebase.html";
    $page_archive        = "$pagebase.old.html";
    (undef, $outgoing)     = tempfile( SUFFIX => '.html' );
    $page_previousExists = 1;
    open( FILE, $page_previous ) or $page_previousExists = 0;
    # get modification time
    if ($page_previousExists) { $mtime = (stat(FILE))[9]; close(FILE); }
    $subj    = "$addsubject $name$today - $url";
    $webdiff =
"$webdiffbin --basedir=$base --archive=$page_previous --current=$page_current --out=$outgoing "
      . "--hicolor=$hicolor --ignore=$ignore --ignoreurl=$ignoreurl --tmin=$tmin --tmax=$tmax";
    $htmldiff = "$htmldiffbin $page_previous $page_current > $outgoing";

    if ($siteinfo{asciimarker}) {
        $webdiff .= " --asciimarker";
    }

    # Download URL using LWP
    $ua = new LWP::UserAgent;
    $ua->agent($useragent);
    $ua->timeout($timeout);
    $ua->env_proxy;
    if ( $proxy ne "" ) { $ua->proxy( http => $proxy ); }
    $req = PrepareRequest($url);

    # set If-Modified-Since to the modification time of the archive file
    if ($page_previousExists) { $req->header('If-Modified-Since' => time2str($mtime)); }

    # Try up to '$retry' times to download URL
    $counter = 0;
    srand;
    while ( $counter < $retry ) {
        $resp = $ua->request($req);

        if ($resp->code == 304) {
            print "Document not changed.\n";
            return 0;
        }

        if ( ! $resp->is_success ) {
            $counter++;
            if ( $randomwait > 0 ) {
                $random = int( rand $randomwait ) + 1;
                sleep $random;
            }
            else { sleep $retrywait; }
            next;
        }

        # Leave if there is no refresh header
        if (!$resp->header("Refresh")) { last; }

        # Handle it if the refresh is for zero seconds
        ( $time, $refresh_url ) = split(/;/, $resp->header("Refresh"), 2);
        if ($time > 0) { last; }

        # Convert to absolute URL and refetch the page
        ( undef, $refresh_to ) = split(/=/, $refresh_url, 2);
        $newurl = URI->new_abs($refresh_to, $url)->as_string();

        $req = PrepareRequest($newurl);
        # Don't reset the counter, we still want to protect from endless loops
    }

    # If URL is successfully downloaded
    if ( $resp->is_success ) {
        # Check if the data is gzip compressed, decompress if it is.
        if (($resp->content_encoding || "") =~ /gzip/) {
            my $new_content;

            if ($feature_compress) {
                $new_content = Compress::Zlib::memGunzip($resp->content);
            } else {
                $new_content = "Server sent gzip compressed data, and we are missing Compress::Zlib";
            }
            if (defined $new_content) {
                $resp->content($new_content);
                $resp->content_length(length $new_content);
                $resp->content_encoding("");
            }
        }

        open( HTML_FILE, ">$page_current" );
        print HTML_FILE "<!-- X-URL: ", $resp->base, " -->\n";
        print HTML_FILE "<BASE HREF= \"", $resp->base, "\">\n";
        my ($type, $charset) = $resp->content_type;
        if ($charset) {
            print HTML_FILE "<meta http-equiv=\"Content-Type\" content=\"", $type, "; ", $charset ,"\"/>\n";
        }
        print HTML_FILE $resp->content;
        close HTML_FILE;

        # set the modification date for later retrieval
        $mtime = $resp->header("Last-Modified");
        if ($mtime) {
            $mtime = str2time($mtime);
            if ($mtime) { # Make sure the time was in a legal format
                    utime($mtime, $mtime, $page_current);
            }
        }

        if ( $diff eq "webdiff" ) {
            if ( $page_previousExists == 1 ) {
                print
"Highlighting differences from previous version of webpage ...\n";
                $rc = system($webdiff);
                if ( $rc != 0 ) {
                    if ( $email ne "" ) {
                        print "Sending highlighted page to $email ...\n";
                        MailDocument( $outgoing, $subj, $email,
                            $siteinfo{mailfrom} );
                    }
                    if ( $emailLink ne "" ) {
                        print "Sending link to $emailLink ...\n";
                        if ( ( $digest ne "no" ) && ( $digest ne "false" ) ) {
                            push @digest, $url;
                            ($digestEmail) or ( $digestEmail = $emailLink );
                        }
                        else {
                            my $filepath = File::Spec->rel2abs($page_previous);
                            $linkmsg =
"The contents of the following URL have changed:\n$url\n\nIt can also be found at:\nfile://$filepath\n";
                            MailMessage(
                                $linkmsg,   $subj,
                                $emailLink, $siteinfo{mailfrom}
                            );
                        }
                    }
                    if ( $program ne "" ) {
                        if ( $programdigest ne "true" ) {
                            ShowDocument( $program, $outgoing );
                        }
                        else {
                            push @htmldigest, "<A HREF=\"".$outgoing."\">Changes for ".$name."</A>".
                                    "&nbsp;<A HREF=\"".$page_archive."\">previous page</A>".
                                    "&nbsp;<A HREF=\"".$page_previous."\">current page</A>".
                                    "&nbsp;<A HREF=\"".$url."\">current page on the net</A><P><P>";
                        }
                    }
                }
                else {
                    print "No changes were detected.\n";
                }
                move $page_previous, $page_archive;
                move $page_current,  $page_previous;
            }
            else {
                print
                  "No previous version for this page. Storing in archive ...\n";
                move $page_current, $page_previous;
            }
        }
        elsif ( $diff eq "htmldiff" )
        {
            if ( $page_previousExists == 1 ) {
                print
"Highlighting differences from previous version of webpage using htmldiff...\n";
                $rc = system($htmldiff);
                if ( $rc != 0 ) {
                    if ( $email ne "" ) {
                        print "Sending highlighted page to $email ...\n";
                        MailDocument( $outgoing, $subj, $email,
                            $siteinfo{mailfrom} );
                    }
                    if ( $emailLink ne "" ) {
                        print "Sending link to $emailLink ...\n";
                        if ( ( $digest ne "no" ) && ( $digest ne "false" ) ) {
                            push @digest, $url;
                            ($digestEmail) or ( $digestEmail = $emailLink );
                        }
                        else {
                            my $filepath = File::Spec->rel2abs($page_previous);
                            $linkmsg =
"The contents of the following URL have changed:\n$url\n\nIt can also be found at:\nfile://$filepath\n";
                            MailMessage( $linkmsg, $subj, $emailLink, $siteinfo{mailfrom} );
                        }
                    }
                    if ( $program ne "" ) {
                        if ( $programdigest ne "true" ) {
                            ShowDocument( $program, $outgoing );
                        }
                        else {
                            push @htmldigest, "<A HREF=\"".$outgoing."\">Changes for ".$name."</A>".
                                    "&nbsp;<A HREF=\"".$page_archive."\">previous page</A>".
                                    "&nbsp;<A HREF=\"".$page_previous."\">current page</A>".
                                    "&nbsp;<A HREF=\"".$url."\">current page on the net</A><P><P>";
                        }
                    }
                }
                else {
                    print "No changes were detected.\n";
                }
                move $page_previous, $page_archive;
                move $page_current,  $page_previous;
            }
            else {
                print
                  "No previous version for this page. Storing in archive ...\n";
                move $page_current, $page_previous;
            }
        }
        else {
            if ( $email ne "" ) {
                MailDocument( $page_current, $subj, $email,
                    $siteinfo{mailfrom} );
            }
            if ($page_previousExists) { move $page_previous, $page_archive; }
            rename $page_current, $page_previous;
        }
    }

    # If unable to download URL
    else {
        print "Unable to retrieve page.\n";
        $errmsg =
          "Unable to retrieve $name ($url).\n\n"
          . "Detailed error as follows:\n"
          . $resp->error_as_HTML;

        if ( $email ne "" && $siteinfo{emailerror} ) {
            MailMessage( $errmsg, $subj, $email, $siteinfo{mailfrom} );
        }
        if ( $emailLink ne "" && $siteinfo{emailerror} ) {
            if ( ( $digest ne "no" ) && ( $digest ne "false" ) ) {
                push @digest, "Unable to retrieve: $url";
                ($digestEmail) or ( $digestEmail = $emailLink );
            }
            else {
                MailMessage( $errmsg, $subj, $emailLink, $siteinfo{mailfrom} );
            }
        }
    }

    unlink($outgoing);

    return 0;
}

sub PrepareRequest() {
    my $url = shift (@_);

    $req = new HTTP::Request( 'GET', $url );

    my $auth = $siteinfo{auth};
    if ( $auth ne "none" ) { $req->authorization_basic( split ( /:/, $auth, 2 ) ); }

    my $proxyAuth = $siteinfo{proxyauth};
    if ( $proxyAuth ne "none" ) { $req->proxy_authorization_basic( split ( /:/, $proxyAuth, 2 ) ); }

    #$req->push_header("Accept" => "text/html, text/plain, text/*, */*");

    my $compress_options = "identity";
    if ($feature_compress) { $compress_options = "gzip, $compress_options"; }
    $req->push_header("Accept-Encoding" => $compress_options);

    return $req;
}

# Mail message
# Params: message, subject, recipient
# Returns: none
sub MailMessage() {
    my $message    = shift (@_);
    my $subject    = shift (@_);
    my @recipients = split /,/, shift (@_);
    my $from       = shift (@_);

    foreach $email (@recipients) {
        $req = HTTP::Request->new( POST => "mailto:" . $email );
        if ( $from ne "" ) {
            $req->header( "From",   $from );
            $req->header( "Sender", $from );
        }
        $req->header( "Subject",      $subject );
        $req->header( "Content-type", "text/plain; charset=us-ascii" );
        $req->header( "Content-Transfer-Encoding", "7bit" );
        $req->header( "MIME-Version",              "1.0" );
        $req->content($message);

        $ua = new LWP::UserAgent;
        $ua->request($req);
    }
}

# Mail HTML document.
# Params: filename, subject, recipient
# Returns: none
sub MailDocument() {
    my $filename   = shift (@_);
    my $subject    = shift (@_);
    my @recipients = split /,/, shift (@_);
    my $from       = shift (@_);
    my $tmpstr     = $/;

    undef $/;
    open( FILE, "$filename" ) or die "Cannot open $filename: $!\n";
    my $content = <FILE>;
    close(FILE);

    foreach $email (@recipients) {
        $req = HTTP::Request->new( POST => "mailto:" . $email );
        if ( $from ne "" ) {
            $req->header( "From",   $from );
            $req->header( "Sender", $from );
        }
        $req->header( "Subject",                   $subject );
        $req->header( "Content-type",              "text/html" );
        $req->header( "Content-Transfer-Encoding", "7bit" );
        $req->header( "MIME-Version",              "1.0" );
        $req->content($content);

        $ua = new LWP::UserAgent;
        my $resp = $ua->request($req);
        die "Error mailing document: ".$resp->message()."\n" if $resp->is_error;
    }

    $/ = $tmpstr;
}

sub ShowDocument() {
    my ( $program, $outgoing ) = @_;
    my $status;

    # special handling for mozilla, try to use remoting...
    if ( $program eq "mozilla" ) {
        $status = system("mozilla -remote \"ping()\"");

        # print "Status after ping: ".$status."\n";

        # if ping() returns ne 0, mozilla is not running, we cannot use openurl()
        if ( $status ne 0 ) {
            $status = system( "mozilla", $outgoing );
            if ( $status ne 0 ) {
                print "Running mozilla returned status: " . $status . "\n";
            }
        }
        else {
            $status =
              system(
                "mozilla -remote \"openurl(file:" . $outgoing . ",new-tab)\"" );
            if ( $status ne 0 ) {
                print "Running mozilla returned status: " . $status . "\n";
            }
        }
    }
    elsif ($program eq "konqueror") {
        # konqueror from KDE has a small client application that helps with opening urls
        # run 'kfmclient --commands' for help about the available commandline options
        $status = system( "kfmclient openURL ".$outgoing." text/html" );
        if ( $status ne 0 ) {
            print "Displaying URL in konqueror returned status: " . $status . "\n";
        }
    }
    else {

        # other applications are currently just started
        $status = system( $program, $outgoing );
        if ( $status ne 0 ) {
            print "Application " . $program
              . " returned status: " . $status . "\n";
        }
    }
}

__END__

=head1 NAME

websec - Web Secretary

=head1 SYNOPSIS

websec [options]


=head1 OPTIONS

=over 8

=item B<--help>

Prints a brief help message and exits.

=item B<--man>

Prints the manual page and exits.

=item B<--base>

Base directory for configuration (~/.websec by default).

=item B<--urllist>

Use a different file for the URL list; the default is "url.list".

=back

=head1 DESCRIPTION

B<websec> is a web page monitoring program.  It will send you a changed web
page with the differences highlighted.

The base directory is the place from which B<websec> will read the config files
and in which it will store its data.

When called without arguments, B<websec> looks for a base directory. If the
current directory contains url.list it is used; otherwise B<websec> falls back
to I<$HOME/.websec/>. You can override this search with the I<--base> option.
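
For example, to keep the configuration and archive somewhere other than the
default locations (the directory name below is only illustrative):

    websec --base ~/news-watch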

You can add a line like I<AddSubject = [websec]> to url.list; websec will then
add I<[websec]> as the first word of the subject of every mail it sends, which
makes the messages easy to pick out with a mail filter.
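
As an illustration, a url.list fragment using this keyword might look as
follows; the first block sets defaults for the entries after it, and the site,
prefix and address are placeholders:

    AddSubject = [websec]

    URL = http://www.example.com/news.html
    Name = Example News
    Prefix = example-news
    Email = monitor@example.com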

The keywords I<Retry>, I<Retrywait>, and I<Timeout> in url.list let you specify
the number of times to retry, the time to wait between retries, and a timeout
setting.

B<Websec> waits a random number of seconds between retries, up to the value
specified by the I<Randomwait> keyword. This is to prevent websec from being
blocked by websites that analyse their logs for requests arriving at
suspiciously regular intervals.
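
A hypothetical entry combining these keywords (all of the values shown are
arbitrary examples):

    URL = http://www.example.com/status.html
    Name = Example Status
    Prefix = example-status
    Email = monitor@example.com
    Retry = 5
    Retrywait = 30
    Timeout = 60
    Randomwait = 120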


=head1 SEE ALSO

/usr/share/doc/websec/README.gz, L<url.list(5)>, L<ignore.list(5)>, L<webdiff(1)>.


=head1 AUTHOR

Victor Chew is the original author of this software,
Baruch Even is continuing the maintenance, and
Joop Stakenborg <pa3aba@debian.org> provided this man page.

=cut

vim:set et ts=4: