From: Olivier Thereaux via cvs-syncmail <cvsmail@w3.org>
Date: Tue, 25 Oct 2005 00:36:06 +0000
To: www-validator-cvs@w3.org
Update of /sources/public/perl/modules/W3C/LinkChecker/bin
In directory hutz:/tmp/cvs-serv16410
Modified Files:
checklink
Log Message:
The (hopefully temporary) workaround for responses not storing the original
request should not apply to 401s, but to 403 Forbidden responses generated by
robots.txt rules.
See:
http://lists.w3.org/Archives/Public/public-qa-dev/2005Sep/0002
Hopefully we will be able to get rid of this if we manage to get Dom's
patch to RobotUA accepted:
http://lists.w3.org/Archives/Public/public-qa-dev/2005Sep/0018.html
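
For context, a minimal sketch (not part of this commit) of the behaviour the
workaround copes with: at the time of this change, LWP::RobotUA built the 403
"Forbidden by robots.txt" response locally, without ever sending a request,
so that synthetic response carries no original request to recover the URL
from. The URL and agent identity below are made up for illustration.

  #!/usr/bin/perl -w
  use strict;
  use LWP::RobotUA;

  # RobotUA wants an agent name and a contact address (both made up here).
  my $ua = LWP::RobotUA->new('checklink-sketch/0.1', 'webmaster@example.org');
  $ua->delay(1/60);    # delay() is in minutes; wait ~1 second between hits

  my $response = $ua->get('http://example.org/robots-forbidden/');

  if ($response->code() == 403 && !defined($response->request)) {
      # Synthetic "Forbidden by robots.txt" response: it was never sent
      # over the wire, so there is no request object to get the URL from.
      print "Skipped (robots.txt): no original request stored\n";
  }
  elsif (defined($response->request)) {
      print "Fetched ", $response->request->url(), "\n";
  }

This is why the patched loop below skips 403s before dereferencing
$response->request.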
Index: checklink
===================================================================
RCS file: /sources/public/perl/modules/W3C/LinkChecker/bin/checklink,v
retrieving revision 4.23
retrieving revision 4.24
diff -u -d -r4.23 -r4.24
--- checklink 31 Aug 2005 09:53:00 -0000 4.23
+++ checklink 25 Oct 2005 00:36:04 -0000 4.24
@@ -1182,14 +1182,10 @@
   foreach (keys %$entries) {
     my $uri;
     my $response = $entries->{$_}->response;
-    if (defined $response->request) {
-      $uri = $response->request->url();
-    }
-    else {
-      # try (for 401s or other cases, back to the previous)
-      $uri = $response->previous()->request()->url();
-    }
-    # Get the results
+    print $response->as_string;
+    next if ($response->code() == 403);
+    $uri = $response->request->url() || die "something went wrong with a request, exiting: $!";
+    # Get the results
     # Record the very first response
     if (! defined($code)) {
       ($code, $message) = delete(@$ua{qw(FirstResponse FirstMessage)});
@@ -1536,8 +1532,8 @@
+
   foreach my $uri (keys %responses) {
-
     my $response = $responses{$uri};
     my $method = $methods{$uri};
     # Get the information back from get_uri()