removed warnings, removed too-fast pausing of crawls

pull/1/head
Michael Peter Christen 12 years ago
parent 8041742e48
commit 93001586a0

@@ -123,10 +123,6 @@ import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.FacetParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.SimpleOrderedMap;
-import org.apache.solr.response.ResultContext;
-import org.apache.solr.search.DocList;
 
 public final class Protocol

@@ -365,16 +365,21 @@ public class Segment {
         // STORE TO SOLR
         final SolrInputDocument solrInputDoc = this.fulltext.getSolrScheme().yacy2solr(id, profile, responseHeader, document, condenser, referrerURL, language);
-        try {
-            this.fulltext.putDocument(solrInputDoc);
-        } catch ( final IOException e ) {
-            String error = "failed to send " + urlNormalform + " to solr";
-            Log.logWarning("SOLR", error + ", pausing Crawler! - " + e.getMessage());
+        tryloop: for (int i = 0; i < 10; i++) {
+            String error = "";
+            try {
+                this.fulltext.putDocument(solrInputDoc);
+                break tryloop;
+            } catch ( final IOException e ) {
+                error = "failed to send " + urlNormalform + " to solr";
+                Log.logWarning("SOLR", error + e.getMessage());
+                try {Thread.sleep(1000);} catch (InterruptedException e1) {}
+            }
+            Log.logWarning("SOLR", error + ", pausing Crawler!");
             // pause the crawler!!!
             Switchboard.getSwitchboard().pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL, error);
             Switchboard.getSwitchboard().pauseCrawlJob(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL, error);
         }
         final long storageEndTime = System.currentTimeMillis();
         // STORE PAGE INDEX INTO WORD INDEX DB
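The hunk above replaces a single putDocument() attempt, which paused the crawler on the first Solr failure, with a labeled retry loop ("tryloop") that sleeps one second between attempts. A minimal standalone sketch of that retry-then-pause pattern follows; Store and pauseCrawler are hypothetical stand-ins for YaCy's Fulltext and Switchboard.pauseCrawlJob() calls, and the escalation here runs only after every attempt has failed, matching the commit's intent of no longer pausing crawls too fast.

    import java.io.IOException;

    // Sketch only: Store and pauseCrawler are hypothetical stand-ins, not YaCy classes.
    public final class RetryPutSketch {

        interface Store { void put(Object doc) throws IOException; }

        static void putWithRetry(final Store store, final Object doc, final Runnable pauseCrawler) {
            String error = null;
            for (int i = 0; i < 10; i++) {          // up to 10 attempts, as in the hunk above
                try {
                    store.put(doc);
                    return;                          // success: no warning, no pause
                } catch (final IOException e) {
                    error = "failed to send document to solr: " + e.getMessage();
                    System.out.println(error);       // stand-in for Log.logWarning("SOLR", ...)
                    try { Thread.sleep(1000); }      // wait 1s before the next attempt
                    catch (final InterruptedException e1) { Thread.currentThread().interrupt(); return; }
                }
            }
            // every attempt failed: only now warn and pause the crawl jobs
            System.out.println(error + ", pausing Crawler!");
            pauseCrawler.run();
        }
    }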

@@ -637,9 +637,8 @@ public final class SearchEvent {
                 }
             } catch ( final InterruptedException e1 ) {
             }
             //Log.logWarning("RWIProcess", "feedingIsFinished() = " + feedingIsFinished() + ", this.addRunning = " + this.addRunning + ", this.nodeStack.sizeQueue() = " + this.nodeStack.sizeQueue() + ", this.stack.sizeQueue() = " + this.rwiStack.sizeQueue() + ", this.doubleDomCachee.size() = " + this.doubleDomCache.size());
             if (this.rankingProcess.doubleDomCache.isEmpty()) {
-                Log.logWarning("RWIProcess", "doubleDomCache.isEmpty");
+                //Log.logWarning("RWIProcess", "doubleDomCache.isEmpty");
                 return null;
             }
@@ -666,7 +665,7 @@ public final class SearchEvent {
                     if (o.getWeight() < bestEntry.getWeight()) bestEntry = o;
                 }
                 if (bestEntry == null) {
-                    Log.logWarning("RWIProcess", "bestEntry == null (1)");
+                    //Log.logWarning("RWIProcess", "bestEntry == null (1)");
                     return null;
                 }
@@ -683,7 +682,7 @@ public final class SearchEvent {
                     }
                 }
                 if (bestEntry == null) {
-                    Log.logWarning("RWIProcess", "bestEntry == null (2)");
+                    //Log.logWarning("RWIProcess", "bestEntry == null (2)");
                     return null;
                 }
                 return this.query.getSegment().fulltext().getMetadata(bestEntry.getElement(), bestEntry.getWeight());
@@ -708,7 +707,7 @@ public final class SearchEvent {
             //System.out.println("timeleft = " + timeleft);
             final URIMetadataNode page = takeRWI(skipDoubleDom, timeleft);
             if (page == null) {
-                Log.logWarning("RWIProcess", "takeRWI returned null");
+                //Log.logWarning("RWIProcess", "takeRWI returned null");
                 return null; // all time was already wasted in takeRWI to get another element
             }

@@ -89,7 +89,7 @@ public class SnippetWorker extends Thread {
             //if (page != null) Log.logInfo("SnippetProcess", "got one page: " + page.metadata().url().toNormalform(true, false));
             //if (page == null) page = rankedCache.takeURL(false, this.timeout - System.currentTimeMillis());
             if (page == null) {
-                Log.logWarning("SnippetProcess", "page == null");
+                //Log.logWarning("SnippetProcess", "page == null");
                 break; // no more available
             }
