- fix for a deadlock with 100% CPU usage during search

- fix for a ranking failure caused by a ConcurrentModificationException

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6553 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 15 years ago
parent 3f771d2a16
commit bb2e03761c

@@ -76,7 +76,7 @@ public final class RankingProcess extends Thread {
private int remote_peerCount, remote_indexCount, remote_resourceSize, local_resourceSize;
private final SortStack<WordReferenceVars> stack;
private int feeders;
private final HashMap<String, SortStack<WordReferenceVars>> doubleDomCache; // key = domhash (6 bytes); value = like stack
private final ConcurrentHashMap<String, SortStack<WordReferenceVars>> doubleDomCache; // key = domhash (6 bytes); value = like stack
private final HashSet<String> handover; // key = urlhash; used for double-check of urls that had been handed over to search process
private final ConcurrentHashMap<String, Integer> ref; // reference score computation for the commonSense heuristic
@@ -90,7 +90,7 @@ public final class RankingProcess extends Thread {
// sortorder: 0 = hash, 1 = url, 2 = ranking
this.localSearchInclusion = null;
this.stack = new SortStack<WordReferenceVars>(maxentries, true);
this.doubleDomCache = new HashMap<String, SortStack<WordReferenceVars>>();
this.doubleDomCache = new ConcurrentHashMap<String, SortStack<WordReferenceVars>>();
this.handover = new HashSet<String>();
this.query = query;
this.order = order;

@@ -153,8 +153,8 @@ public class ResultFetcher {
this.lastLifeSign = System.currentTimeMillis();
// check if we have enough
//if ((query.contentdom == ContentDomain.IMAGE) && (images.size() >= query.neededResults() + fetchAhead)) break;
//if ((query.contentdom != ContentDomain.IMAGE) && (result.size() >= query.neededResults() + fetchAhead)) break;
if ((query.contentdom == ContentDomain.IMAGE) && (images.size() >= query.neededResults() + 50)) break;
if ((query.contentdom != ContentDomain.IMAGE) && (result.size() >= query.neededResults() + 10)) break;
// get next entry
page = rankedCache.takeURL(true, taketimeout);

Loading…
Cancel
Save