fixed a crawler bug caused by an NPE in logging

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@7033 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 15 years ago
parent a82a93f2fc
commit 0d81731e88

de/anomic/crawler/CrawlStacker.java

@@ -28,6 +28,7 @@
 package de.anomic.crawler;
+import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.Date;
@@ -361,9 +362,10 @@ public final class CrawlStacker {
         // check if this is a local address and we are allowed to index local pages:
         //boolean local = hostAddress.isSiteLocalAddress() || hostAddress.isLoopbackAddress();
         //assert local == yacyURL.isLocalDomain(url.hash()); // TODO: remove the dnsResolve above!
+        InetAddress ia = Domains.dnsResolve(host);
         return (local) ?
-            ("the host '" + host + "' is local, but local addresses are not accepted: " + Domains.dnsResolve(host).getHostAddress()) :
-            ("the host '" + host + "' is global, but global addresses are not accepted: " + Domains.dnsResolve(host).getHostAddress());
+            ("the host '" + host + "' is local, but local addresses are not accepted: " + ((ia == null) ? "null" : ia.getHostAddress())) :
+            ("the host '" + host + "' is global, but global addresses are not accepted: " + ((ia == null) ? "null" : ia.getHostAddress()));
     }

     public String urlInAcceptedDomainHash(final byte[] urlhash) {
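
The fix follows the standard guard for a nullable lookup: resolve once into a local variable and null-check it before dereferencing, because Domains.dnsResolve(host) returns null when the host cannot be resolved, which is exactly the NPE this commit fixes. A minimal, self-contained sketch of the same pattern; the dnsResolve helper here is a hypothetical stand-in for YaCy's Domains.dnsResolve, not the real implementation:

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    public class DnsLogExample {

        // Hypothetical stand-in for Domains.dnsResolve(): returns null
        // instead of throwing when the host cannot be resolved.
        static InetAddress dnsResolve(final String host) {
            try {
                return InetAddress.getByName(host);
            } catch (final UnknownHostException e) {
                return null;
            }
        }

        // Builds the rejection message without risking an NPE:
        // resolve once, then guard the dereference.
        static String rejectionMessage(final String host, final boolean local) {
            final InetAddress ia = dnsResolve(host);
            final String addr = (ia == null) ? "null" : ia.getHostAddress();
            return local
                ? "the host '" + host + "' is local, but local addresses are not accepted: " + addr
                : "the host '" + host + "' is global, but global addresses are not accepted: " + addr;
        }

        public static void main(final String[] args) {
            System.out.println(rejectionMessage("localhost", true));
            System.out.println(rejectionMessage("no-such-host.invalid", false));
        }
    }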

de/anomic/crawler/ZURL.java

@@ -132,7 +132,7 @@ public class ZURL implements Iterable<ZURL.Entry> {
         Entry entry = new Entry(bentry, executor, workdate, workcount, anycause);
         put(entry);
         stack.add(entry.hash());
-        Log.logInfo("URL Errors", bentry.url().toNormalform(false, false) + " - " + anycause);
+        Log.logInfo("Rejected URL", bentry.url().toNormalform(false, false) + " - " + anycause);
         while (stack.size() > maxStackSize) stack.poll();
     }
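
For context, the lines around the changed log call keep a bounded buffer of the most recent rejections: each new entry's hash is appended to the queue, and the while loop polls the oldest hashes off the head until the size is back under maxStackSize. A small sketch of that add-then-trim idea, assuming a queue-backed buffer; the class and names below are illustrative, not YaCy code:

    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    // Illustrative rolling buffer that keeps only the newest maxStackSize
    // entries, mirroring the add-then-poll pattern in the ZURL hunk above.
    public class RecentRejects {

        private final Queue<String> stack = new ConcurrentLinkedQueue<String>();
        private final int maxStackSize;

        public RecentRejects(final int maxStackSize) {
            this.maxStackSize = maxStackSize;
        }

        public void push(final String urlHash) {
            stack.add(urlHash);                               // newest at the tail
            while (stack.size() > maxStackSize) stack.poll(); // drop oldest from the head
        }

        public static void main(final String[] args) {
            final RecentRejects recent = new RecentRejects(2);
            recent.push("hashA");
            recent.push("hashB");
            recent.push("hashC");
            System.out.println(recent.stack); // prints [hashB, hashC]
        }
    }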
