"Fossies" - the Fresh Open Source Software Archive

Member "install-tl-20200916/tlpkg/tlperl/site/lib/LWP/RobotUA.pm" (7 Mar 2020, 7893 Bytes) of package /windows/misc/install-tl.zip:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) Perl source code syntax highlighting (style: standard) with prefixed line numbers and code folding option. Alternatively you can here view or download the uninterpreted source code file.

package LWP::RobotUA;

use base qw(LWP::UserAgent);

our $VERSION = '6.43';

require WWW::RobotRules;
require HTTP::Request;
require HTTP::Response;

use Carp ();
use HTTP::Status ();
use HTTP::Date qw(time2str);
use strict;

#
# Attributes in addition to those found in LWP::UserAgent:
#
# $self->{'delay'}    Required delay between requests to the same
#                     server, in minutes.
#
# $self->{'rules'}    A WWW::RobotRules object
#

sub new
{
    my $class = shift;
    my %cnf;
    if (@_ < 4) {
        # legacy args
        @cnf{qw(agent from rules)} = @_;
    }
    else {
        %cnf = @_;
    }

    Carp::croak('LWP::RobotUA agent required') unless $cnf{agent};
    Carp::croak('LWP::RobotUA from address required')
        unless $cnf{from} && $cnf{from} =~ m/\@/;

    my $delay = delete $cnf{delay} || 1;
    my $use_sleep = delete $cnf{use_sleep};
    $use_sleep = 1 unless defined($use_sleep);
    my $rules = delete $cnf{rules};

    my $self = LWP::UserAgent->new(%cnf);
    $self = bless $self, $class;

    $self->{'delay'} = $delay;   # minutes
    $self->{'use_sleep'} = $use_sleep;

    if ($rules) {
        $rules->agent($cnf{agent});
        $self->{'rules'} = $rules;
    }
    else {
        $self->{'rules'} = WWW::RobotRules->new($cnf{agent});
    }

    $self;
}

sub delay     { shift->_elem('delay',     @_); }
sub use_sleep { shift->_elem('use_sleep', @_); }

sub agent
{
    my $self = shift;
    my $old = $self->SUPER::agent(@_);
    if (@_) {
        # Changing our name means to start fresh
        $self->{'rules'}->agent($self->{'agent'});
    }
    $old;
}

sub rules {
    my $self = shift;
    my $old = $self->_elem('rules', @_);
    $self->{'rules'}->agent($self->{'agent'}) if @_;
    $old;
}

sub no_visits
{
    my($self, $netloc) = @_;
    $self->{'rules'}->no_visits($netloc) || 0;
}

*host_count = \&no_visits;  # backwards compatibility with LWP-5.02

sub host_wait
{
    my($self, $netloc) = @_;
    return undef unless defined $netloc;
    my $last = $self->{'rules'}->last_visit($netloc);
    if ($last) {
        my $wait = int($self->{'delay'} * 60 - (time - $last));
        $wait = 0 if $wait < 0;
        return $wait;
    }
    return 0;
}

sub simple_request
{
    my($self, $request, $arg, $size) = @_;

    # Do we try to access a new server?
    my $allowed = $self->{'rules'}->allowed($request->uri);

    if ($allowed < 0) {
        # Host has not been visited before, or robots.txt expired; fetch "robots.txt"
        my $robot_url = $request->uri->clone;
        $robot_url->path("robots.txt");
        $robot_url->query(undef);

        # make access to robots.txt legal since this will be a recursive call
        $self->{'rules'}->parse($robot_url, "");

        my $robot_req = HTTP::Request->new('GET', $robot_url);
        my $parse_head = $self->parse_head(0);
        my $robot_res = $self->request($robot_req);
        $self->parse_head($parse_head);
        my $fresh_until = $robot_res->fresh_until;
        my $content = "";
        if ($robot_res->is_success && $robot_res->content_is_text) {
            $content = $robot_res->decoded_content;
            $content = "" unless $content && $content =~ /^\s*Disallow\s*:/mi;
        }
        $self->{'rules'}->parse($robot_url, $content, $fresh_until);

        # recalculate allowed...
        $allowed = $self->{'rules'}->allowed($request->uri);
    }

    # Check rules
    unless ($allowed) {
        my $res = HTTP::Response->new(
            HTTP::Status::RC_FORBIDDEN, 'Forbidden by robots.txt');
        $res->request( $request ); # bind it to that request
        return $res;
    }

    my $netloc = eval { local $SIG{__DIE__}; $request->uri->host_port; };
    my $wait = $self->host_wait($netloc);

    if ($wait) {
        if ($self->{'use_sleep'}) {
            sleep($wait);
        }
        else {
            my $res = HTTP::Response->new(
                HTTP::Status::RC_SERVICE_UNAVAILABLE, 'Please, slow down');
            $res->header('Retry-After', time2str(time + $wait));
            $res->request( $request ); # bind it to that request
            return $res;
        }
    }

    # Perform the request
    my $res = $self->SUPER::simple_request($request, $arg, $size);

    $self->{'rules'}->visit($netloc);

    $res;
}

sub as_string
{
    my $self = shift;
    my @s;
    push(@s, "Robot: $self->{'agent'} operated by $self->{'from'}  [$self]");
    push(@s, "    Minimum delay: " . int($self->{'delay'}*60) . "s");
    push(@s, "    Will sleep if too early") if $self->{'use_sleep'};
    push(@s, "    Rules = $self->{'rules'}");
    join("\n", @s, '');
}

1;

__END__

=pod

=head1 NAME

LWP::RobotUA - a class for well-behaved Web robots

=head1 SYNOPSIS

  use LWP::RobotUA;
  my $ua = LWP::RobotUA->new('my-robot/0.1', 'me@foo.com');
  $ua->delay(10);  # be very nice -- max one hit every ten minutes!
  ...

  # Then use it just like a normal LWP::UserAgent:
  my $response = $ua->get('http://whatever.int/...');
  ...

=head1 DESCRIPTION

This class implements a user agent that is suitable for robot
applications.  Robots should be nice to the servers they visit.  They
should consult the F</robots.txt> file to ensure that they are welcome
and they should not make requests too frequently.

But before you consider writing a robot, take a look at
L<URL:http://www.robotstxt.org/>.

When you use an I<LWP::RobotUA> object as your user agent, you do not
really have to think about these things yourself; C<robots.txt> files
are automatically consulted and obeyed, the server isn't queried
too rapidly, and so on.  Just send requests
as you do when you are using a normal I<LWP::UserAgent>
object (using C<< $ua->get(...) >>, C<< $ua->head(...) >>,
C<< $ua->request(...) >>, etc.), and this
special agent will make sure you are nice.

=head1 METHODS

LWP::RobotUA is a sub-class of L<LWP::UserAgent> and implements the
same methods.  In addition, the following methods are provided:

=head2 new

    my $ua = LWP::RobotUA->new( %options )
    my $ua = LWP::RobotUA->new( $agent, $from )
    my $ua = LWP::RobotUA->new( $agent, $from, $rules )

The LWP::UserAgent options C<agent> and C<from> are mandatory.  The
options C<delay>, C<use_sleep> and C<rules> initialize attributes
private to the RobotUA.  If C<rules> is not provided, a new
C<WWW::RobotRules> object is created to maintain an in-memory database
of F<robots.txt> rules.

It is also possible to pass the values of C<agent>, C<from> and
optionally C<rules> as plain positional arguments.
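
For example, the following two ways of constructing a robot UA are
equivalent (the agent name and address are illustrative):

    my $ua = LWP::RobotUA->new(
        agent => 'my-robot/0.1',
        from  => 'me@foo.com',
        delay => 0.5,             # minutes between requests to one server
    );

    my $ua = LWP::RobotUA->new('my-robot/0.1', 'me@foo.com');
    $ua->delay(0.5);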

=head2 delay

    my $delay = $ua->delay;
    $ua->delay( $minutes );

Get/set the minimum delay between requests to the same server, in
I<minutes>.  The default is C<1> minute.  Note that this number doesn't
have to be an integer; for example, this sets the delay to C<10> seconds:

    $ua->delay(10/60);

=head2 use_sleep

    my $bool = $ua->use_sleep;
    $ua->use_sleep( $boolean );

Get/set a value indicating whether the UA should C<sleep> if requests
arrive too fast, defined as C<< $ua->delay >> minutes not having passed
since the last request to the given server.  The default is true.  If this
value is false, an internal C<SERVICE_UNAVAILABLE> response will be
generated instead.  It will have a C<Retry-After> header that indicates
when it is OK to send another request to this server.
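
For example, with sleeping disabled the throttling response can be
detected and the request rescheduled instead of blocking.  A minimal
sketch (the URL is illustrative):

    $ua->use_sleep(0);
    my $res = $ua->get('http://example.com/page');
    if ($res->code == 503 && $res->header('Retry-After')) {
        # too early; retry no sooner than the time in the Retry-After header
        my $retry_at = $res->header('Retry-After');   # HTTP date string
    }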

=head2 rules

    my $rules = $ua->rules;
    $ua->rules( $rules );

Set/get which I<WWW::RobotRules> object to use.
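
The rules object can also be made persistent between runs.  As a sketch,
assuming the L<WWW::RobotRules::AnyDBM_File> subclass and an illustrative
cache file name:

    use WWW::RobotRules::AnyDBM_File;

    my $rules = WWW::RobotRules::AnyDBM_File->new('my-robot/0.1', 'robots.db');
    my $ua = LWP::RobotUA->new(
        agent => 'my-robot/0.1',
        from  => 'me@foo.com',
        rules => $rules,
    );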

=head2 no_visits

    my $num = $ua->no_visits( $netloc )

Returns the number of documents fetched from this server host.  Yeah I
know, this method should probably have been named C<num_visits> or
something like that. :-(

=head2 host_wait

    my $num = $ua->host_wait( $netloc )

Returns the number of I<seconds> (from now) you must wait before you can
make a new request to this host.
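
C<$netloc> is a C<host:port> string such as the one returned by
C<< $request->uri->host_port >>.  A minimal sketch (the host name is
illustrative):

    my $wait = $ua->host_wait('www.example.com:80');
    sleep($wait) if $wait;   # it is now polite to issue the next request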

=head2 as_string

    my $string = $ua->as_string;

Returns a string that describes the state of the UA.
Mainly useful for debugging.

=head1 SEE ALSO

L<LWP::UserAgent>, L<WWW::RobotRules>

=head1 COPYRIGHT

Copyright 1996-2004 Gisle Aas.

This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.

=cut