- refactoring of cache naming in web index cache (no more dht semantics there)

- activating a feature in the thread dump that cuts off dumping of a trace of inside-java-core events

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5593 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 16 years ago
parent e52c3eb79d
commit 6a32193916

@ -289,11 +289,11 @@ public class PerformanceQueues_p {
// table cache settings // table cache settings
prop.putNum("urlCacheSize", switchboard.webIndex.getURLwriteCacheSize()); prop.putNum("urlCacheSize", switchboard.webIndex.getURLwriteCacheSize());
prop.putNum("wordCacheSize", switchboard.webIndex.dhtCacheSize()); prop.putNum("wordCacheSize", switchboard.webIndex.indexCacheSize());
prop.putNum("wordCacheSizeKBytes", switchboard.webIndex.dhtCacheSizeBytes()/1024); prop.putNum("wordCacheSizeKBytes", switchboard.webIndex.indexCacheSizeBytes()/1024);
prop.putNum("maxURLinCache", switchboard.webIndex.maxURLinDHTCache()); prop.putNum("maxURLinCache", switchboard.webIndex.maxURLinCache());
prop.putNum("maxAgeOfCache", switchboard.webIndex.maxAgeOfDHTCache() / 1000 / 60); // minutes prop.putNum("maxAgeOfCache", switchboard.webIndex.maxAgeOfCache() / 1000 / 60); // minutes
prop.putNum("minAgeOfCache", switchboard.webIndex.minAgeOfDHTCache() / 1000 / 60); // minutes prop.putNum("minAgeOfCache", switchboard.webIndex.minAgeOfCache() / 1000 / 60); // minutes
prop.putNum("maxWaitingWordFlush", switchboard.getConfigLong("maxWaitingWordFlush", 180)); prop.putNum("maxWaitingWordFlush", switchboard.getConfigLong("maxWaitingWordFlush", 180));
prop.put("wordCacheMaxCount", switchboard.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 20000)); prop.put("wordCacheMaxCount", switchboard.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 20000));
prop.put("wordCacheInitCount", switchboard.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_INIT_COUNT, 30000)); prop.put("wordCacheInitCount", switchboard.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_INIT_COUNT, 30000));

@ -132,7 +132,7 @@ public class Threaddump_p {
Entry<String, Integer> e = removeMax(dumps); Entry<String, Integer> e = removeMax(dumps);
bufferappend(buffer, plain, "Occurrences: " + e.getValue()); bufferappend(buffer, plain, "Occurrences: " + e.getValue());
bufferappend(buffer, plain, e.getKey()); bufferappend(buffer, plain, e.getKey());
bufferappend(buffer, plain, ""); //bufferappend(buffer, plain, "");
} }
bufferappend(buffer, plain, ""); bufferappend(buffer, plain, "");
} }
@ -193,11 +193,16 @@ public class Threaddump_p {
tracename = "[" + tracename + "] "; tracename = "[" + tracename + "] ";
} }
String threadtitle = tracename + "Thread= " + thread.getName() + " " + (thread.isDaemon()?"daemon":"") + " id=" + thread.getId() + " " + thread.getState().toString(); String threadtitle = tracename + "Thread= " + thread.getName() + " " + (thread.isDaemon()?"daemon":"") + " id=" + thread.getId() + " " + thread.getState().toString();
String className;
for (int i = 0; i < stackTraceElements.length; i++) { for (int i = 0; i < stackTraceElements.length; i++) {
ste = stackTraceElements[i]; ste = stackTraceElements[i];
//if (ste.getClassName().startsWith("java.") || ste.getClassName().startsWith("sun.")) continue; className = ste.getClassName();
if (className.startsWith("java.") || className.startsWith("sun.")) {
sb.setLength(0);
bufferappend(sb, plain, tracename + "at " + htmlFilterCharacterCoding.unicode2html(ste.toString(), true));
} else {
if (i == 0) { if (i == 0) {
line = getLine(getClassFile(classPath, ste.getClassName()), ste.getLineNumber()); line = getLine(getClassFile(classPath, className), ste.getLineNumber());
} else { } else {
line = null; line = null;
} }
@ -207,6 +212,7 @@ public class Threaddump_p {
bufferappend(sb, plain, tracename + "at " + htmlFilterCharacterCoding.unicode2html(ste.toString(), true)); bufferappend(sb, plain, tracename + "at " + htmlFilterCharacterCoding.unicode2html(ste.toString(), true));
} }
} }
}
String threaddump = sb.toString(); String threaddump = sb.toString();
ArrayList<String> threads = dumps.get(threaddump); ArrayList<String> threads = dumps.get(threaddump);
if (threads == null) threads = new ArrayList<String>(); if (threads == null) threads = new ArrayList<String>();

@ -21,11 +21,11 @@ public class status_p {
prop.setLocalized(false); prop.setLocalized(false);
prop.put("rejected", "0"); prop.put("rejected", "0");
sb.updateMySeed(); sb.updateMySeed();
final int cacheSize = sb.webIndex.dhtCacheSize(); final int cacheSize = sb.webIndex.indexCacheSize();
final long cacheMaxSize = sb.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 10000); final long cacheMaxSize = sb.getConfigLong(plasmaSwitchboardConstants.WORDCACHE_MAX_COUNT, 10000);
prop.putNum("ppm", sb.currentPPM()); prop.putNum("ppm", sb.currentPPM());
prop.putNum("qpm", sb.webIndex.seedDB.mySeed().getQPM()); prop.putNum("qpm", sb.webIndex.seedDB.mySeed().getQPM());
prop.putNum("wordCacheSize", sb.webIndex.dhtCacheSize()); prop.putNum("wordCacheSize", sb.webIndex.indexCacheSize());
prop.putNum("wordCacheSize", cacheSize); prop.putNum("wordCacheSize", cacheSize);
prop.putNum("wordCacheMaxSize", cacheMaxSize); prop.putNum("wordCacheMaxSize", cacheMaxSize);
prop.put("wordCacheCount", cacheSize); prop.put("wordCacheCount", cacheSize);

@ -101,9 +101,9 @@ public final class transferRWI {
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Not granted."); sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Not granted.");
result = "not_granted"; result = "not_granted";
pause = 0; pause = 0;
} else if (checkLimit && sb.webIndex.dhtCacheSize() > cachelimit) { } else if (checkLimit && sb.webIndex.indexCacheSize() > cachelimit) {
// we are too busy to receive indexes // we are too busy to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (buffersize=" + sb.webIndex.dhtCacheSize() + ")."); sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (buffersize=" + sb.webIndex.indexCacheSize() + ").");
granted = false; // don't accept more words if there are too many words to flush granted = false; // don't accept more words if there are too many words to flush
result = "busy"; result = "busy";
pause = 60000; pause = 60000;
@ -201,7 +201,7 @@ public final class transferRWI {
result = "ok"; result = "ok";
if (checkLimit) { if (checkLimit) {
pause = (sb.webIndex.dhtCacheSize() < 500) ? 0 : sb.webIndex.dhtCacheSize(); // estimation of necessary pause time pause = (sb.webIndex.indexCacheSize() < 500) ? 0 : sb.webIndex.indexCacheSize(); // estimation of necessary pause time
} }
} }

@ -96,7 +96,7 @@ public final class plasmaWordIndex implements indexRI {
private final ByteOrder indexOrder = Base64Order.enhancedCoder; private final ByteOrder indexOrder = Base64Order.enhancedCoder;
private final indexRAMRI dhtCache; private final indexRAMRI indexCache;
private final indexCollectionRI collections; // new database structure to replace AssortmentCluster and FileCluster private final indexCollectionRI collections; // new database structure to replace AssortmentCluster and FileCluster
private final Log log; private final Log log;
public indexRepositoryReference referenceURL; public indexRepositoryReference referenceURL;
@ -147,15 +147,15 @@ public final class plasmaWordIndex implements indexRI {
if (!(textindexcache.exists())) textindexcache.mkdirs(); if (!(textindexcache.exists())) textindexcache.mkdirs();
if (new File(textindexcache, "index.dhtin.blob").exists()) { if (new File(textindexcache, "index.dhtin.blob").exists()) {
// migration of the both caches into one // migration of the both caches into one
this.dhtCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtout.blob", log); this.indexCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtout.blob", log);
indexRAMRI dhtInCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtin.blob", log); indexRAMRI dhtInCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtin.blob", log);
for (indexContainer c: dhtInCache) { for (indexContainer c: dhtInCache) {
this.dhtCache.addEntries(c); this.indexCache.addEntries(c);
} }
new File(textindexcache, "index.dhtin.blob").delete(); new File(textindexcache, "index.dhtin.blob").delete();
} else { } else {
// read in new BLOB // read in new BLOB
this.dhtCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtout.blob", log); this.indexCache = new indexRAMRI(textindexcache, indexRWIRowEntry.urlEntryRow, entityCacheMaxSize, wCacheMaxChunk, wCacheMaxAge, "index.dhtout.blob", log);
} }
// create collections storage path // create collections storage path
@ -242,7 +242,7 @@ public final class plasmaWordIndex implements indexRI {
} }
public void clear() { public void clear() {
dhtCache.clear(); indexCache.clear();
collections.clear(); collections.clear();
try { try {
referenceURL.clear(); referenceURL.clear();
@ -415,30 +415,30 @@ public final class plasmaWordIndex implements indexRI {
} }
public int minMem() { public int minMem() {
return 1024*1024 /* indexing overhead */ + dhtCache.minMem() + collections.minMem(); return 1024*1024 /* indexing overhead */ + indexCache.minMem() + collections.minMem();
} }
public int maxURLinDHTCache() { public int maxURLinCache() {
return dhtCache.maxURLinCache(); return indexCache.maxURLinCache();
} }
public long minAgeOfDHTCache() { public long minAgeOfCache() {
return dhtCache.minAgeOfCache(); return indexCache.minAgeOfCache();
} }
public long maxAgeOfDHTCache() { public long maxAgeOfCache() {
return dhtCache.maxAgeOfCache(); return indexCache.maxAgeOfCache();
} }
public int dhtCacheSize() { public int indexCacheSize() {
return dhtCache.size(); return indexCache.size();
} }
public long dhtCacheSizeBytes() { public long indexCacheSizeBytes() {
// calculate the real size in bytes of DHT-In/Out-Cache // calculate the real size in bytes of the index cache
long cacheBytes = 0; long cacheBytes = 0;
final long entryBytes = indexRWIRowEntry.urlEntryRow.objectsize; final long entryBytes = indexRWIRowEntry.urlEntryRow.objectsize;
final indexRAMRI cache = (dhtCache); final indexRAMRI cache = (indexCache);
synchronized (cache) { synchronized (cache) {
final Iterator<indexContainer> it = cache.wordContainerIterator(null, false, true); final Iterator<indexContainer> it = cache.wordContainerIterator(null, false, true);
while (it.hasNext()) cacheBytes += it.next().size() * entryBytes; while (it.hasNext()) cacheBytes += it.next().size() * entryBytes;
@ -447,10 +447,10 @@ public final class plasmaWordIndex implements indexRI {
} }
public void setMaxWordCount(final int maxWords) { public void setMaxWordCount(final int maxWords) {
dhtCache.setMaxWordCount(maxWords); indexCache.setMaxWordCount(maxWords);
} }
public void dhtFlushControl(final indexRAMRI theCache) { public void cacheFlushControl(final indexRAMRI theCache) {
// check for forced flush // check for forced flush
int cs = cacheSize(); int cs = cacheSize();
if (cs > 0) { if (cs > 0) {
@ -479,16 +479,16 @@ public final class plasmaWordIndex implements indexRI {
public void addEntry(final String wordHash, final indexRWIRowEntry entry, final long updateTime) { public void addEntry(final String wordHash, final indexRWIRowEntry entry, final long updateTime) {
// add the entry // add the entry
dhtCache.addEntry(wordHash, entry, updateTime, true); indexCache.addEntry(wordHash, entry, updateTime, true);
dhtFlushControl(this.dhtCache); cacheFlushControl(this.indexCache);
} }
public void addEntries(final indexContainer entries) { public void addEntries(final indexContainer entries) {
assert (entries.row().objectsize == indexRWIRowEntry.urlEntryRow.objectsize); assert (entries.row().objectsize == indexRWIRowEntry.urlEntryRow.objectsize);
// add the entry // add the entry
dhtCache.addEntries(entries); indexCache.addEntries(entries);
dhtFlushControl(this.dhtCache); cacheFlushControl(this.indexCache);
} }
public void flushCacheFor(int time) { public void flushCacheFor(int time) {
@ -496,8 +496,8 @@ public final class plasmaWordIndex implements indexRI {
} }
private synchronized void flushCacheUntil(long timeout) { private synchronized void flushCacheUntil(long timeout) {
while (System.currentTimeMillis() < timeout && dhtCache.size() > 0) { while (System.currentTimeMillis() < timeout && indexCache.size() > 0) {
flushCacheOne(dhtCache); flushCacheOne(indexCache);
} }
} }
@ -570,7 +570,7 @@ public final class plasmaWordIndex implements indexRI {
} }
public boolean hasContainer(final String wordHash) { public boolean hasContainer(final String wordHash) {
if (dhtCache.hasContainer(wordHash)) return true; if (indexCache.hasContainer(wordHash)) return true;
if (collections.hasContainer(wordHash)) return true; if (collections.hasContainer(wordHash)) return true;
return false; return false;
} }
@ -583,7 +583,7 @@ public final class plasmaWordIndex implements indexRI {
// get from cache // get from cache
indexContainer container; indexContainer container;
container = dhtCache.getContainer(wordHash, urlselection); container = indexCache.getContainer(wordHash, urlselection);
// get from collection index // get from collection index
if (container == null) { if (container == null) {
@ -668,7 +668,7 @@ public final class plasmaWordIndex implements indexRI {
} }
public int size() { public int size() {
return java.lang.Math.max(collections.size(), dhtCache.size()); return java.lang.Math.max(collections.size(), indexCache.size());
} }
public int collectionsSize() { public int collectionsSize() {
@ -676,11 +676,11 @@ public final class plasmaWordIndex implements indexRI {
} }
public int cacheSize() { public int cacheSize() {
return dhtCache.size(); return indexCache.size();
} }
public void close() { public void close() {
dhtCache.close(); indexCache.close();
collections.close(); collections.close();
referenceURL.close(); referenceURL.close();
seedDB.close(); seedDB.close();
@ -692,15 +692,15 @@ public final class plasmaWordIndex implements indexRI {
final indexContainer c = new indexContainer( final indexContainer c = new indexContainer(
wordHash, wordHash,
indexRWIRowEntry.urlEntryRow, indexRWIRowEntry.urlEntryRow,
dhtCache.sizeContainer(wordHash)); indexCache.sizeContainer(wordHash));
c.addAllUnique(dhtCache.deleteContainer(wordHash)); c.addAllUnique(indexCache.deleteContainer(wordHash));
c.addAllUnique(collections.deleteContainer(wordHash)); c.addAllUnique(collections.deleteContainer(wordHash));
return c; return c;
} }
public boolean removeEntry(final String wordHash, final String urlHash) { public boolean removeEntry(final String wordHash, final String urlHash) {
boolean removed = false; boolean removed = false;
removed = removed | (dhtCache.removeEntry(wordHash, urlHash)); removed = removed | (indexCache.removeEntry(wordHash, urlHash));
removed = removed | (collections.removeEntry(wordHash, urlHash)); removed = removed | (collections.removeEntry(wordHash, urlHash));
return removed; return removed;
} }
@ -718,14 +718,14 @@ public final class plasmaWordIndex implements indexRI {
public int removeEntries(final String wordHash, final Set<String> urlHashes) { public int removeEntries(final String wordHash, final Set<String> urlHashes) {
int removed = 0; int removed = 0;
removed += dhtCache.removeEntries(wordHash, urlHashes); removed += indexCache.removeEntries(wordHash, urlHashes);
removed += collections.removeEntries(wordHash, urlHashes); removed += collections.removeEntries(wordHash, urlHashes);
return removed; return removed;
} }
public String removeEntriesExpl(final String wordHash, final Set<String> urlHashes) { public String removeEntriesExpl(final String wordHash, final Set<String> urlHashes) {
String removed = ""; String removed = "";
removed += dhtCache.removeEntries(wordHash, urlHashes) + ", "; removed += indexCache.removeEntries(wordHash, urlHashes) + ", ";
removed += collections.removeEntries(wordHash, urlHashes); removed += collections.removeEntries(wordHash, urlHashes);
return removed; return removed;
} }
@ -753,12 +753,12 @@ public final class plasmaWordIndex implements indexRI {
public synchronized TreeSet<indexContainer> indexContainerSet(final String startHash, final boolean ram, final boolean rot, int count) { public synchronized TreeSet<indexContainer> indexContainerSet(final String startHash, final boolean ram, final boolean rot, int count) {
// creates a set of indexContainers // creates a set of indexContainers
// this does not use the dhtInCache // this does not use the cache
final Order<indexContainer> containerOrder = new indexContainerOrder(indexOrder.clone()); final Order<indexContainer> containerOrder = new indexContainerOrder(indexOrder.clone());
containerOrder.rotate(emptyContainer(startHash, 0)); containerOrder.rotate(emptyContainer(startHash, 0));
final TreeSet<indexContainer> containers = new TreeSet<indexContainer>(containerOrder); final TreeSet<indexContainer> containers = new TreeSet<indexContainer>(containerOrder);
final Iterator<indexContainer> i = wordContainerIterator(startHash, rot, ram); final Iterator<indexContainer> i = wordContainerIterator(startHash, rot, ram);
if (ram) count = Math.min(dhtCache.size(), count); if (ram) count = Math.min(indexCache.size(), count);
indexContainer container; indexContainer container;
// this loop does not terminate using the i.hasNex() predicate when rot == true // this loop does not terminate using the i.hasNex() predicate when rot == true
// because then the underlying iterator is a rotating iterator without termination // because then the underlying iterator is a rotating iterator without termination
@ -891,7 +891,7 @@ public final class plasmaWordIndex implements indexRI {
public synchronized CloneableIterator<indexContainer> wordContainerIterator(final String startHash, final boolean rot, final boolean ram) { public synchronized CloneableIterator<indexContainer> wordContainerIterator(final String startHash, final boolean rot, final boolean ram) {
final CloneableIterator<indexContainer> i = wordContainers(startHash, ram); final CloneableIterator<indexContainer> i = wordContainers(startHash, ram);
if (rot) { if (rot) {
return new RotateIterator<indexContainer>(i, new String(Base64Order.zero(startHash.length())), dhtCache.size() + ((ram) ? 0 : collections.size())); return new RotateIterator<indexContainer>(i, new String(Base64Order.zero(startHash.length())), indexCache.size() + ((ram) ? 0 : collections.size()));
} }
return i; return i;
} }
@ -900,10 +900,10 @@ public final class plasmaWordIndex implements indexRI {
final Order<indexContainer> containerOrder = new indexContainerOrder(indexOrder.clone()); final Order<indexContainer> containerOrder = new indexContainerOrder(indexOrder.clone());
containerOrder.rotate(emptyContainer(startWordHash, 0)); containerOrder.rotate(emptyContainer(startWordHash, 0));
if (ram) { if (ram) {
return dhtCache.wordContainerIterator(startWordHash, false, true); return indexCache.wordContainerIterator(startWordHash, false, true);
} }
return new MergeIterator<indexContainer>( return new MergeIterator<indexContainer>(
dhtCache.wordContainerIterator(startWordHash, false, true), indexCache.wordContainerIterator(startWordHash, false, true),
collections.wordContainerIterator(startWordHash, false, false), collections.wordContainerIterator(startWordHash, false, false),
containerOrder, containerOrder,
indexContainer.containerMergeMethod, indexContainer.containerMergeMethod,
@ -1019,7 +1019,7 @@ public final class plasmaWordIndex implements indexRI {
} }
public int sizeEntry(String key) { public int sizeEntry(String key) {
return dhtCache.sizeEntry(key) + collections.sizeEntry(key); return indexCache.sizeEntry(key) + collections.sizeEntry(key);
} }
} }

Loading…
Cancel
Save