"Fossies" - the Fresh Open Source Software Archive  

Source code changes of the file "lib/BackupPC/CGI/View.pm" between
BackupPC-4.3.2.tar.gz and BackupPC-4.4.0.tar.gz

About: BackupPC is a high-performance, enterprise-grade system for backing up Linux and WinXX PCs and laptops to a server’s disk (http/cgi user interface).

View.pm (BackupPC-4.3.2) vs. View.pm (BackupPC-4.4.0)
(both versions: skipping to change at line 30)

Unified view of the two files: lines prefixed with "-" appear only in 4.3.2, lines
prefixed with "+" appear only in 4.4.0, and lines prefixed with a space are common
to both. Change blocks that only re-indent or re-wrap existing lines are shown
below as common context.
 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 # GNU General Public License for more details.
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
 #
 #========================================================================
 #
-# Version 4.3.2, released 17 Feb 2020.
+# Version 4.4.0, released 20 Jun 2020.
 #
 # See http://backuppc.sourceforge.net.
 #
 #========================================================================
 package BackupPC::CGI::View;
 use strict;
 use BackupPC::CGI::Lib qw(:all);
 use BackupPC::XS;
 use Encode qw/decode_utf8/;
 sub action
 {
     my $Privileged = CheckPermission($In{host});
     my $compress = 0;
     my $fh;
     my $host = $In{host};
     my $num = $In{num};
     my $type = $In{type};
     my $linkHosts = 0;
     my($file, $comment);
     my $ext = $num ne "" ? ".$num" : "";
     ErrorExit(eval("qq{$Lang->{Invalid_number__num}}"))
         if ( $num ne "" && $num !~ /^\d+$/ );
     if ( $type eq "poolUsage" && $Privileged ) {
         $file = "$LogDir/poolUsage$num.png";
         if ( open($fh, "<", $file) ) {
             my $data;
             print "Content-Type: image/png\r\n";
             print "Content-Transfer-Encoding: binary\r\n\r\n";
             while ( sysread($fh, $data, 1024 * 1024) > 0 ) {
                 print $data;
             }
             close($fh);
         }
         return;
     }
     if ( $type ne "docs" && !$Privileged ) {
         ErrorExit($Lang->{Only_privileged_users_can_view_log_or_config_files});
     }
     if ( $type eq "XferLOG" ) {
         $file = "$TopDir/pc/$host/SmbLOG$ext";
-        $file = "$TopDir/pc/$host/XferLOG$ext" if ( !-f $file && !-f "$file.z");
+        $file = "$TopDir/pc/$host/XferLOG$ext" if ( !-f $file && !-f "$file.z" );
     } elsif ( $type eq "XferLOGbad" ) {
         $file = "$TopDir/pc/$host/SmbLOG.bad";
-        $file = "$TopDir/pc/$host/XferLOG.bad" if ( !-f $file && !-f "$file.z");
+        $file = "$TopDir/pc/$host/XferLOG.bad" if ( !-f $file && !-f "$file.z" );
     } elsif ( $type eq "XferErrbad" ) {
         $file = "$TopDir/pc/$host/SmbLOG.bad";
-        $file = "$TopDir/pc/$host/XferLOG.bad" if ( !-f $file && !-f "$file.z");
+        $file = "$TopDir/pc/$host/XferLOG.bad" if ( !-f $file && !-f "$file.z" );
         $comment = $Lang->{Extracting_only_Errors};
     } elsif ( $type eq "XferErr" ) {
         $file = "$TopDir/pc/$host/SmbLOG$ext";
-        $file = "$TopDir/pc/$host/XferLOG$ext" if ( !-f $file && !-f "$file.z");
+        $file = "$TopDir/pc/$host/XferLOG$ext" if ( !-f $file && !-f "$file.z" );
         $comment = $Lang->{Extracting_only_Errors};
     } elsif ( $type eq "RestoreLOG" ) {
         $file = "$TopDir/pc/$host/RestoreLOG$ext";
     } elsif ( $type eq "RestoreErr" ) {
         $file = "$TopDir/pc/$host/RestoreLOG$ext";
         $comment = $Lang->{Extracting_only_Errors};
     } elsif ( $type eq "ArchiveLOG" ) {
         $file = "$TopDir/pc/$host/ArchiveLOG$ext";
     } elsif ( $type eq "ArchiveErr" ) {
         $file = "$TopDir/pc/$host/ArchiveLOG$ext";
         $comment = $Lang->{Extracting_only_Errors};
     } elsif ( $type eq "config" ) {
         # Note: only works for Storage::Text
         $file = $bpc->{storage}->ConfigPath($host);
     } elsif ( $type eq "hosts" ) {
         # Note: only works for Storage::Text
         $file = $bpc->ConfDir() . "/hosts";
         $linkHosts = 1;
     } elsif ( $type eq "docs" ) {
         $file = $bpc->InstallDir() . "/share/doc/BackupPC/BackupPC.html";
     } elsif ( $host ne "" ) {
         if ( !defined($In{num}) ) {
             # get the latest LOG file
             $file = ($bpc->sortedPCLogFiles($host))[0];
             $file =~ s/\.z$//;
         } else {
             $file = "$TopDir/pc/$host/LOG$ext";
         }
         $linkHosts = 1;
     } else {
         $file = "$LogDir/LOG$ext";
         $linkHosts = 1;
     }
     if ( !-f $file && -f "$file.z" ) {
         $file .= ".z";
         $compress = 1;
     }
     my($contentPre, $contentSub, $contentPost);
     $contentPre .= eval("qq{$Lang->{Log_File__file__comment}}");
-    if ( $file ne ""
-            && defined($fh = BackupPC::XS::FileZIO::open($file, 0, $compress)) )
-    {
+    if ( $file ne "" && defined($fh = BackupPC::XS::FileZIO::open($file, 0, $compress)) ) {
         my $mtimeStr = $bpc->timeStamp((stat($file))[9], 1);
         $contentPre .= eval("qq{$Lang->{Contents_of_log_file}}");
         $contentPre .= "<pre>";
-        if ( $type eq "XferErr" || $type eq "XferErrbad"
-                || $type eq "RestoreErr"
-                || $type eq "ArchiveErr" ) {
+        if ( $type eq "XferErr"
+            || $type eq "XferErrbad"
+            || $type eq "RestoreErr"
+            || $type eq "ArchiveErr" ) {
             $contentSub = sub {
                 #
                 # Because the content might be large, we use
                 # a sub to return the data in 64K chunks.
                 #
                 my($skipped, $c, $s);
                 while ( length($c) < 65536 ) {
                     $s = $fh->readLine();
                     if ( $s eq "" ) {
                         $c .= eval("qq{$Lang->{skipped__skipped_lines}}")
                             if ( $skipped );
                         last;
                     }
                     $s =~ s/[\n\r]+//g;
                     if ( $s =~ /smb: \\>/
                         || $s =~ /^\s*(\d+) \(\s*\d+\.\d kb\/s\) (.*)$/
                         || $s =~ /^tar: dumped \d+ files/
                         || $s =~ /^\s*added interface/i
                         || $s =~ /^\s*restore tar file /i
                         || $s =~ /^\s*restore directory /i
                         || $s =~ /^\s*tarmode is now/i
                         || $s =~ /^\s*Total bytes written/i
                         || $s =~ /^\s*Domain=/i
                         || $s =~ /^\s*Getting files newer than/i
                         || $s =~ /^\s*Output is \/dev\/null/
                         || $s =~ /^\s*\([\d.,]* kb\/s\) \(average [\d\.]* kb\/s\)$/
                         || $s =~ /^\s+directory \\/
                         || $s =~ /^\s*Timezone is/
                         || $s =~ /^\s*creating lame (up|low)case table/i
                         || $s =~ /^\.\//
                         || $s =~ /^ / ) {
                         $skipped++;
                         next;
                     }
                     $c .= eval("qq{$Lang->{skipped__skipped_lines}}")
                         if ( $skipped );
                     $skipped = 0;
                     $c .= decode_utf8(${EscHTML($s)}) . "\n";
                 }
                 return $c;
             };
         } elsif ( $linkHosts ) {
             #
             # Because the content might be large, we use
             # a sub to return the data in 64K chunks.
             #
             $contentSub = sub {
                 my($c, $s);
                 while ( length($c) < 65536 ) {
                     $s = $fh->readLine();
                     last if ( $s eq "" );
                     $s =~ s/[\n\r]+//g;
                     $s = ${EscHTML($s)};
                     $s =~ s/\b([\w-.]+)\b/defined($Hosts->{$1})
                                             ? ${HostLink($1)} : $1/eg;
                     $c .= decode_utf8($s) . "\n";
                 }
                 return $c;
             };
         } elsif ( $type eq "config" ) {
             #
             # Because the content might be large, we use
             # a sub to return the data in 64K chunks.
             #
             $contentSub = sub {
                 my($c, $s);
                 while ( length($c) < 65536 ) {
                     $s = $fh->readLine();
                     last if ( $s eq "" );
                     $s =~ s/[\n\r]+//g;
+
                     # remove any passwords and user names
                     $s =~ s/(SmbSharePasswd.*=.*['"]).*(['"])/$1****$2/ig;
                     $s =~ s/(SmbShareUserName.*=.*['"]).*(['"])/$1****$2/ig;
                     $s =~ s/(RsyncdPasswd.*=.*['"]).*(['"])/$1****$2/ig;
                     $s =~ s/(ServerMesgSecret.*=.*['"]).*(['"])/$1****$2/ig;
                     $s = ${EscHTML($s)};
                     $s =~ s[(\$Conf\{.*?\})][
                         my $c = $1;
                         my $s = lc($c);
                         $s =~ s{(\W)}{_}g;
                         "<a href=\"?action=view&type=docs#item_$s\"><tt>$c</tt></a>"
                     ]eg;
                     $c .= decode_utf8($s) . "\n";
                 }
                 return $c;
             };
         } elsif ( $type eq "docs" ) {
             #
             # Because the content might be large, we use
             # a sub to return the data in 64K chunks.
             #
             $contentSub = sub {
                 my($c, $s);
                 while ( length($c) < 65536 ) {
                     $s = $fh->readLine();
                     last if ( $s eq "" );
                     $c .= decode_utf8($s);
                 }
                 return $c;
             };
             #
             # Documentation has a different header and no pre or post text,
             # so just handle it here
             #
             Header($Lang->{BackupPC__Documentation}, "", 0, $contentSub);
             Trailer();
             return;
         } else {
             #
             # Because the content might be large, we use
             # a sub to return the data in 64K chunks.
             #
             $contentSub = sub {
                 my($c, $s);
                 while ( length($c) < 65536 ) {
                     $s = $fh->readLine();
                     last if ( $s eq "" );
                     $s =~ s/[\n\r]+//g;
                     $s = ${EscHTML($s)};
                     $c .= decode_utf8($s) . "\n";
                 }
                 return $c;
             };
         }
     } else {
         if ( $type eq "docs" ) {
             ErrorExit(eval("qq{$Lang->{Unable_to_open__file__configuration_problem}}"));
         }
         $contentPre .= eval("qq{$Lang->{_pre___Can_t_open_log_file__file}}");
     }
     $contentPost .= "</pre>\n" if ( $type ne "docs" );
-    Header(eval("qq{$Lang->{Backup_PC__Log_File__file}}"),
-           $contentPre, !-f "$TopDir/pc/$host/backups",
-           $contentSub, $contentPost);
+    Header(
+        eval("qq{$Lang->{Backup_PC__Log_File__file}}"),
+        $contentPre, !-f "$TopDir/pc/$host/backups",
+        $contentSub, $contentPost
+    );
     Trailer();
     $fh->close() if ( defined($fh) );
 }
 1;
End of changes: 28 change blocks; 141 lines changed or deleted, 147 lines changed or added.
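A pattern worth noting in this file: every branch builds a $contentSub closure that Header() then calls repeatedly, so large log files are streamed to the browser in roughly 64 KB chunks instead of being slurped into memory. The sketch below is only a minimal illustration of that callback shape, assuming a plain text file and a hypothetical render_page() consumer in place of BackupPC::XS::FileZIO and the real Header()/Trailer() helpers.

#!/usr/bin/perl
# Minimal sketch of the chunked $contentSub callback pattern used in View.pm.
# Assumptions: a plain filehandle stands in for BackupPC::XS::FileZIO, and the
# hypothetical render_page() stands in for the CGI Header()/Trailer() helpers.
use strict;
use warnings;

# Return a closure that yields the file in ~64KB chunks; an empty string
# signals end of input, mirroring how $contentSub behaves in View.pm.
sub make_content_sub
{
    my($fh) = @_;
    return sub {
        my $c = "";
        while ( length($c) < 65536 ) {
            my $s = <$fh>;
            last if ( !defined($s) );
            $s =~ s/[\n\r]+//g;
            $c .= $s . "\n";
        }
        return $c;
    };
}

# Consume the closure until it runs dry, printing each chunk as it arrives.
sub render_page
{
    my($contentSub) = @_;
    while ( (my $chunk = $contentSub->()) ne "" ) {
        print $chunk;
    }
}

# Example usage; the path is only a placeholder.
open(my $fh, "<", "/var/log/messages") or die "open: $!";
render_page(make_content_sub($fh));
close($fh);

In the error-viewing branches (XferErr, XferErrbad, RestoreErr, ArchiveErr) the same loop additionally drops lines matching a list of noise patterns and inserts a single "skipped N lines" marker in their place.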
