// in the best case, this should never happen if the balancer works properly
// this is only a protection against the worst case, where the crawler could
// behave in a DoS-like manner
Log.logInfo("BALANCER","forcing crawl-delay of "+sleeptime+" milliseconds for "+crawlEntry.url().getHost()+((sleeptime>Math.max(minimumLocalDelta,minimumGlobalDelta))?" (caused by robots.txt)" :""));
Log.logInfo("BALANCER","forcing crawl-delay of "+sleeptime+" milliseconds for "+crawlEntry.url().getHost()+((sleeptime>Math.max(minimumLocalDelta,minimumGlobalDelta))?" (forced latency)" :""));
Log.logInfo("kelondroBLOBHeap","wrote a dump for the "+this.index.size()+" index entries of "+heapFile.getName()+" in "+(System.currentTimeMillis()-start)+" milliseconds.");
// now we can create a dump of the index and the gap information
// to speed up the next start
try {
    final long start = System.currentTimeMillis();
    newGap().dump(fingerprintGapFile(this.heapFile));
    index.dump(fingerprintIndexFile(this.heapFile));
Log.logInfo("kelondroBLOBHeapWriter","wrote a dump for the "+this.index.size()+" index entries of "+heapFile.getName()+" in "+(System.currentTimeMillis()-start)+" milliseconds.");
Log.logInfo("kelondroBLOBHeapWriter","wrote a dump for the "+this.index.size()+" index entries of "+heapFileREADY.getName()+" in "+(System.currentTimeMillis()-start)+" milliseconds.");