- increased the update interval of the status page graph (5000 ms -> 8000 ms)

- balancer now notes in the log when a forced crawl delay is caused by robots.txt
- removed console output for forced GC
- smaller RAM flush for the RWI cache (lowcachedivisor 1200 -> 1800); should lead to better cache usage and faster crawling

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5228 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 17 years ago
parent 3a919bf24e
commit c8bdd965ec

@@ -18,7 +18,7 @@
     }
     function init() {
         loaderBanner = window.setInterval("loadBanner()", 20000);
-        window.setInterval("reloadGraph()", 5000);
+        window.setInterval("reloadGraph()", 8000);
     }
     </script>
 </head>

@@ -573,7 +573,7 @@ public class Balancer {
         // in best case, this should never happen if the balancer works propertly
         // this is only to protection against the worst case, where the crawler could
         // behave in a DoS-manner
-        serverLog.logInfo("BALANCER", "forcing fetch delay of " + sleeptime + " millisecond for " + crawlEntry.url().getHost());
+        serverLog.logInfo("BALANCER", "forcing crawl-delay of " + sleeptime + " milliseconds for " + crawlEntry.url().getHost() + ((sleeptime > Math.max(minimumLocalDelta, minimumGlobalDelta)) ? " (caused by robots.txt)" : ""));
         try {synchronized(this) { this.wait(sleeptime); }} catch (final InterruptedException e) {}
     }
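The new suffix infers the cause from the length of the delay: if the forced sleep time exceeds both configured minimum deltas, the extra wait can only have come from a Crawl-delay directive in robots.txt. A minimal standalone sketch of that decision, with hypothetical values standing in for Balancer's minimumLocalDelta/minimumGlobalDelta fields:

// Sketch of the cause-tagging logic added above; the field values are
// assumed example defaults, not necessarily YaCy's configuration.
public class CrawlDelayLogSketch {
    static final long minimumLocalDelta = 10;    // assumed, milliseconds
    static final long minimumGlobalDelta = 500;  // assumed, milliseconds

    static String delayMessage(final long sleeptime, final String host) {
        // a delay longer than both minimum deltas can only come from robots.txt
        return "forcing crawl-delay of " + sleeptime + " milliseconds for " + host
                + ((sleeptime > Math.max(minimumLocalDelta, minimumGlobalDelta))
                        ? " (caused by robots.txt)" : "");
    }

    public static void main(final String[] args) {
        System.out.println(delayMessage(250, "example.org"));  // minimum-delta wait
        System.out.println(delayMessage(5000, "example.org")); // robots.txt crawl-delay
    }
}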

@@ -78,7 +78,7 @@ public final class plasmaWordIndex implements indexRI {
     // environment constants
     public static final long wCacheMaxAge = 1000 * 60 * 30; // milliseconds; 30 minutes
     public static final int  wCacheMaxChunk = 800;          // maximum number of references for each urlhash
-    public static final int  lowcachedivisor = 1200;
+    public static final int  lowcachedivisor = 1800;
     public static final int  maxCollectionPartition = 7;    // should be 7
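The commit message reads this as a "smaller RAM flush": presumably the number of entries flushed from the RWI cache per cycle is derived from the cache size divided by lowcachedivisor, so raising the divisor from 1200 to 1800 shrinks each flush and keeps more references in RAM. The helper below is a hypothetical illustration of that reading, not YaCy's actual flush code:

// Hypothetical illustration only: assumes flush chunk = cacheSize / lowcachedivisor.
public class FlushSizeSketch {
    static int flushChunk(final int cacheSize, final int lowcachedivisor) {
        return Math.max(1, cacheSize / lowcachedivisor);
    }

    public static void main(final String[] args) {
        final int cacheSize = 120000; // assumed number of cached RWI entries
        System.out.println("divisor 1200 -> flush " + flushChunk(cacheSize, 1200) + " entries"); // 100
        System.out.println("divisor 1800 -> flush " + flushChunk(cacheSize, 1800) + " entries"); // 66
    }
}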

@@ -50,9 +50,9 @@ public class serverMemory {
         final long elapsed = System.currentTimeMillis() - lastGC;
         if (elapsed > last) {
             final long free = free();
-            System.out.println("vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv");
+            //System.out.println("vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv");
             System.gc();
-            System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ if you see this many times please report to forum");
+            //System.out.println("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ if you see this many times please report to forum");
             lastGC = System.currentTimeMillis();
             if (log.isFine()) log.logInfo("[gc] before: " + bytesToString(free) + ", after: " + bytesToString(free()) + ", call: " + info);
         } else if (log.isFine()) {
