- From: Olivier Thereaux via cvs-syncmail <cvsmail@w3.org>
- Date: Tue, 18 Sep 2007 06:09:58 +0000
- To: www-validator-cvs@w3.org
Update of /sources/public/perl/modules/W3C/LinkChecker/bin
In directory hutz:/tmp/cvs-serv24155
Modified Files:
checklink
Log Message:
Error message rewriting: no need to scream at the user.
Index: checklink
===================================================================
RCS file: /sources/public/perl/modules/W3C/LinkChecker/bin/checklink,v
retrieving revision 4.71
retrieving revision 4.72
diff -u -d -r4.71 -r4.72
--- checklink 18 Sep 2007 06:06:32 -0000 4.71
+++ checklink 18 Sep 2007 06:09:56 -0000 4.72
@@ -1969,7 +1969,7 @@
400 => 'Usually the sign of a malformed URL that cannot be parsed by the server.',
401 => "The link is not public. You'd better specify it.",
403 => 'The link is forbidden! This needs fixing. Usual suspects: a missing index.html or Overview.html, or a missing ACL.',
- 404 => 'The link is broken. Fix it NOW!',
+ 404 => 'The link is broken. Fix it.',
405 => 'The server does not allow HEAD requests. Go ask the guys who run this server why. Check the link manually.',
406 => "The server isn't capable of responding according to the Accept* headers sent. Check it out.",
407 => 'The link is a proxy, but requires Authentication.',
@@ -1981,7 +1981,7 @@
503 => 'The server cannot service the request, for some unknown reason.',
# Non-HTTP codes:
RC_ROBOTS_TXT() => "The link was not checked due to <a href=\"http://www.robotstxt.org/wc/exclusion.html#robotstxt\">robots exclusion rules</a>. Check the link manually, and see also the link checker <a href=\"$Cfg{Doc_URI}#bot\">documentation on robots exclusion</a>.",
- RC_DNS_ERROR() => 'The hostname could not be resolved. This link needs to be fixed.',
+ RC_DNS_ERROR() => 'The hostname could not be resolved. Check the link for typos.',
);
my %priority = ( 410 => 1,
404 => 2,
Received on Tuesday, 18 September 2007 06:36:35 UTC