Prevent NPE when the RWI (reverse word index) is disabled

pull/1/head
orbiter 12 years ago
parent c2bcfd8afb
commit da621e827e

@ -31,6 +31,8 @@ import java.util.Map;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.data.word.WordReference;
import net.yacy.kelondro.rwi.IndexCell;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.Formatter;
import net.yacy.kelondro.util.MemoryControl;
@ -132,6 +134,7 @@ public class PerformanceQueues_p {
sb.setConfig("performanceSpeed", post.getInt("profileSpeed", 100));
}
IndexCell<WordReference> rwi = indexSegment.termIndex();
while (threads.hasNext()) {
threadName = threads.next();
thread = sb.getThread(threadName);
@ -215,7 +218,7 @@ public class PerformanceQueues_p {
// prop.put("table_" + c + "_disabled", /*(threadName.endsWith("_indexing")) ? 1 :*/ "0");
prop.put("table_" + c + "_disabled", threadName.equals("10_httpd") ? "1" : "0" ); // httpd hardcoded defaults
prop.put("table_" + c + "_recommendation", threadName.endsWith("_indexing") ? "1" : "0");
prop.putNum("table_" + c + "_recommendation_value", threadName.endsWith("_indexing") ? (indexSegment.termIndex().minMem() / 1024) : 0);
prop.putNum("table_" + c + "_recommendation_value", rwi == null ? 0 : threadName.endsWith("_indexing") ? (rwi.minMem() / 1024) : 0);
c++;
}
prop.put("table", c);
@ -245,7 +248,7 @@ public class PerformanceQueues_p {
if ((post != null) && (post.containsKey("cacheSizeSubmit"))) {
final int wordCacheMaxCount = post.getInt("wordCacheMaxCount", 20000);
sb.setConfig(SwitchboardConstants.WORDCACHE_MAX_COUNT, Integer.toString(wordCacheMaxCount));
indexSegment.termIndex().setBufferMaxWordCount(wordCacheMaxCount);
if (rwi != null) rwi.setBufferMaxWordCount(wordCacheMaxCount);
}
if ((post != null) && (post.containsKey("poolConfig"))) {
@ -301,10 +304,10 @@ public class PerformanceQueues_p {
// table cache settings
prop.putNum("wordCacheSize", indexSegment.RWIBufferCount());
prop.putNum("wordCacheSizeKBytes", indexSegment.termIndex().getBufferSizeBytes()/1024);
prop.putNum("maxURLinCache", indexSegment.termIndex().getBufferMaxReferences());
prop.putNum("maxAgeOfCache", indexSegment.termIndex().getBufferMaxAge() / 1000 / 60); // minutes
prop.putNum("minAgeOfCache", indexSegment.termIndex().getBufferMinAge() / 1000 / 60); // minutes
prop.putNum("wordCacheSizeKBytes", rwi == null ? 0 : rwi.getBufferSizeBytes()/1024);
prop.putNum("maxURLinCache", rwi == null ? 0 : rwi.getBufferMaxReferences());
prop.putNum("maxAgeOfCache", rwi == null ? 0 : rwi.getBufferMaxAge() / 1000 / 60); // minutes
prop.putNum("minAgeOfCache", rwi == null ? 0 : rwi.getBufferMinAge() / 1000 / 60); // minutes
prop.putNum("maxWaitingWordFlush", sb.getConfigLong("maxWaitingWordFlush", 180));
prop.put("wordCacheMaxCount", sb.getConfigLong(SwitchboardConstants.WORDCACHE_MAX_COUNT, 20000));
prop.put("crawlPauseProxy", sb.getConfigLong(SwitchboardConstants.PROXY_ONLINE_CAUTION_DELAY, 30000));

@ -95,7 +95,7 @@ public final class query {
if (obj.equals("rwiurlcount")) try {
// the total number of different urls in the rwi is returned
// <env> shall contain a word hash, the number of assigned lurls to this hash is returned
prop.put("response", sb.index.termIndex().get(env.getBytes(), null).size());
prop.put("response", sb.index.termIndex() == null ? 0 : sb.index.termIndex().get(env.getBytes(), null).size());
return prop;
} catch (final IOException e) {
Log.logException(e);

@ -258,7 +258,7 @@ public final class search {
final long timer = System.currentTimeMillis();
//final Map<byte[], ReferenceContainer<WordReference>>[] containers = sb.indexSegment.index().searchTerm(theQuery.queryHashes, theQuery.excludeHashes, plasmaSearchQuery.hashes2StringSet(urls));
final TreeMap<byte[], ReferenceContainer<WordReference>> incc = indexSegment.termIndex().searchConjunction(theQuery.getQueryGoal().getIncludeHashes(), QueryParams.hashes2Set(urls));
final TreeMap<byte[], ReferenceContainer<WordReference>> incc = indexSegment.termIndex() == null ? new TreeMap<byte[], ReferenceContainer<WordReference>>() : indexSegment.termIndex().searchConjunction(theQuery.getQueryGoal().getIncludeHashes(), QueryParams.hashes2Set(urls));
EventTracker.update(EventTracker.EClass.SEARCH, new ProfilingGraph.EventSearch(theQuery.id(true), SearchEventType.COLLECTION, "", incc.size(), System.currentTimeMillis() - timer), false);
if (incc != null) {

@ -540,7 +540,7 @@ public class yacysearch {
// delete the index entry locally
final String delHash = post.get("deleteref", ""); // urlhash
indexSegment.termIndex().remove(qg.getIncludeHashes(), delHash.getBytes());
if (indexSegment.termIndex() != null) indexSegment.termIndex().remove(qg.getIncludeHashes(), delHash.getBytes());
// make new news message with negative voting
if ( !sb.isRobinsonMode() ) {

@ -169,7 +169,7 @@ public class Dispatcher {
final ArrayList<ReferenceContainer<WordReference>> containers = new ArrayList<ReferenceContainer<WordReference>>(maxContainerCount);
final Iterator<ReferenceContainer<WordReference>> indexContainerIterator = this.segment.termIndex().referenceContainerIterator(hash, true, true, ram); // very important that rotation is true here
final Iterator<ReferenceContainer<WordReference>> indexContainerIterator = this.segment.termIndex() == null ? new ArrayList<ReferenceContainer<WordReference>>().iterator() : this.segment.termIndex().referenceContainerIterator(hash, true, true, ram); // very important that rotation is true here
ReferenceContainer<WordReference> container;
int refcount = 0;
@ -201,7 +201,7 @@ public class Dispatcher {
it = c.entries();
while (it.hasNext()) try { urlHashes.put(it.next().urlhash()); } catch (final SpaceExceededException e) { Log.logException(e); }
if (this.log.isFine()) this.log.logFine("selected " + urlHashes.size() + " urls for word '" + ASCII.String(c.getTermHash()) + "'");
if (!urlHashes.isEmpty()) this.segment.termIndex().remove(c.getTermHash(), urlHashes);
if (this.segment.termIndex() != null && !urlHashes.isEmpty()) this.segment.termIndex().remove(c.getTermHash(), urlHashes);
}
rc = containers;
} else {
@ -209,7 +209,7 @@ public class Dispatcher {
// but to avoid race conditions return the results from the deletes
rc = new ArrayList<ReferenceContainer<WordReference>>(containers.size());
for (final ReferenceContainer<WordReference> c: containers) {
container = this.segment.termIndex().remove(c.getTermHash()); // be aware this might be null!
container = this.segment.termIndex() == null ? null : this.segment.termIndex().remove(c.getTermHash()); // be aware this might be null!
if (container != null && !container.isEmpty()) {
if (this.log.isFine()) this.log.logFine("selected " + container.size() + " urls for word '" + ASCII.String(c.getTermHash()) + "'");
rc.add(container);

@ -394,7 +394,44 @@ public final class Protocol {
return count;
}
/*
private int readSeeds(String prefix) {
String seedStr;
while ( (seedStr = result.get("seed" + i++)) != null ) {
// integrate new seed into own database
// the first seed, "seed0" is the seed of the responding peer
if ( seedStr.length() > Seed.maxsize ) {
Network.log.logInfo("hello/client: rejected contacting seed; too large ("
+ seedStr.length()
+ " > "
+ Seed.maxsize
+ ")");
} else {
try {
if ( i == 1 ) {
final int p = address.indexOf(':');
if ( p < 0 ) {
return -1;
}
InetAddress ia = Domains.dnsResolve(address.substring(0, p));
if (ia == null) continue;
final String host = ia.getHostAddress();
s = Seed.genRemoteSeed(seedStr, false, host);
} else {
s = Seed.genRemoteSeed(seedStr, false, null);
}
if ( peerActions.peerArrival(s, (i == 1)) ) {
count++;
}
} catch ( final IOException e ) {
Network.log.logInfo("hello/client: rejected contacting seed; bad ("
+ e.getMessage()
+ ")");
}
}
}
}
*/
public static Seed querySeed(final Seed target, final String seedHash) {
// prepare request
final String salt = crypt.randomSalt();

@ -713,7 +713,7 @@ public class Segment {
// delete all word references
int count = 0;
if (words != null) count = termIndex().remove(Word.words2hashesHandles(words), urlhash);
if (words != null && termIndex() != null) count = termIndex().remove(Word.words2hashesHandles(words), urlhash);
// finally delete the url entry itself
fulltext().remove(urlhash);

@ -1026,8 +1026,10 @@ public final class SearchEvent {
// check index-of constraint
if ((this.query.constraint != null) && (this.query.constraint.get(Condenser.flag_cat_indexof)) && (!(pagetitle.startsWith("index of")))) {
final Iterator<byte[]> wi = this.query.getQueryGoal().getIncludeHashes().iterator();
while (wi.hasNext()) {
this.query.getSegment().termIndex().removeDelayed(wi.next(), page.hash());
if (this.query.getSegment().termIndex() != null) {
while (wi.hasNext()) {
this.query.getSegment().termIndex().removeDelayed(wi.next(), page.hash());
}
}
if (log.isFine()) log.logFine("dropped RWI: url does not match index-of constraint");
if (page.word().local()) this.local_rwi_available.decrementAndGet(); else this.remote_rwi_available.decrementAndGet();

@ -86,7 +86,7 @@ public class ResultEntry implements Comparable<ResultEntry>, Comparator<ResultEn
if ((seed == null) || ((address = seed.getPublicAddress()) == null)) {
// seed is not known from here
try {
indexSegment.termIndex().remove(
if (indexSegment.termIndex() != null) indexSegment.termIndex().remove(
Word.words2hashesHandles(Condenser.getWords(
("yacyshare " +
path.replace('?', ' ') +

Loading…
Cancel
Save