added a catch for Exception to all threads to check whether any of them dies silently without any other notification

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5922 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 16 years ago
parent 083533e5ec
commit c097531e3d
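The change applies the same pattern in every affected file: the whole body of a thread's run() method is wrapped in an additional catch (Exception e) { e.printStackTrace(); } so that an unexpected unchecked exception can no longer end the thread without leaving any trace. A minimal sketch of that pattern follows; the class and method names (MonitoredWorker, doWork) are made up for illustration and do not come from the YaCy sources:

// Sketch only (not YaCy code): wrap the whole run() body in a broad catch
// so that an unchecked exception is printed instead of killing the thread silently.
public class MonitoredWorker extends Thread {

    private volatile boolean running = true;

    @Override
    public void run() {
        try {
            while (running) {
                doWork(); // placeholder for the thread's real work
            }
        } catch (final InterruptedException e) {
            // expected when terminate() interrupts the sleep; treated as normal shutdown
        } catch (final Exception e) {
            // without this catch an unchecked exception would end the thread silently;
            // printing the stack trace makes the failure visible, which is the point of this commit
            e.printStackTrace();
        }
    }

    private void doWork() throws InterruptedException {
        Thread.sleep(100); // stands in for real work
    }

    public void terminate() {
        this.running = false;
        this.interrupt();
    }
}

A Thread.setDefaultUncaughtExceptionHandler would be an alternative way to surface such failures without touching every run() method; this commit instead edits each thread explicitly, as shown in the hunks below.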

@@ -533,20 +533,28 @@ final class memprofiler extends Thread {
}
public void run() {
int seconds0 = 0, kilobytes0 = 0;
int seconds1 = 0, kilobytes1 = 0;
while(run) {
memChart.setColor("FF0000");
seconds1 = (int) ((System.currentTimeMillis() - start) / 1000);
kilobytes1 = (int) (MemoryControl.used() / 1024);
memChart.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, seconds0, kilobytes0, seconds1, kilobytes1);
seconds0 = seconds1;
kilobytes0 = kilobytes1;
try {Thread.sleep(100);} catch (final InterruptedException e) {}
try {
int seconds0 = 0, kilobytes0 = 0;
int seconds1 = 0, kilobytes1 = 0;
while (run) {
memChart.setColor("FF0000");
seconds1 = (int) ((System.currentTimeMillis() - start) / 1000);
kilobytes1 = (int) (MemoryControl.used() / 1024);
memChart.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, seconds0, kilobytes0, seconds1, kilobytes1);
seconds0 = seconds1;
kilobytes0 = kilobytes1;
try {Thread.sleep(100);} catch (final InterruptedException e) {}
}
} catch (final Exception e) {
e.printStackTrace();
}
try {
ImageIO.write(memChart.getImage(), "png", outputFile);
} catch (final IOException e) {}
} catch (final IOException e) {
// do nothing
} catch (final Exception e) {
e.printStackTrace();
}
}
public void terminate() {

@@ -87,17 +87,21 @@ public class URLAnalysis {
}
public void run() {
yacyURL url;
Pattern p = Pattern.compile("~|\\(|\\)|\\+|-|@|:|%|\\.|;|_");
while (true) {
try {
url = in.take();
if (url == poison) break;
update(url.getHost().replaceAll("-", "\\.").split("\\."));
update(p.matcher(url.getPath()).replaceAll("/").split("/"));
} catch (InterruptedException e) {
e.printStackTrace();
try {
yacyURL url;
Pattern p = Pattern.compile("~|\\(|\\)|\\+|-|@|:|%|\\.|;|_");
while (true) {
try {
url = in.take();
if (url == poison) break;
update(url.getHost().replaceAll("-", "\\.").split("\\."));
update(p.matcher(url.getPath()).replaceAll("/").split("/"));
} catch (InterruptedException e) {
e.printStackTrace();
}
}
} catch (Exception e) {
e.printStackTrace();
}
}

@@ -1332,7 +1332,11 @@ public final class httpdProxyHandler {
) {
out.write(buffer, 0, len);
}
} catch (final IOException e) {}
} catch (final IOException e) {
// do nothing
} catch (Exception e) {
e.printStackTrace();
}
}
public void pleaseTerminate() {

@@ -41,7 +41,7 @@ import de.anomic.kelondro.order.ByteOrder;
import de.anomic.kelondro.order.CloneableIterator;
import de.anomic.kelondro.util.ByteArray;
public class BLOBCompressor extends Thread implements BLOB {
public class BLOBCompressor implements BLOB {
static byte[] gzipMagic = {(byte) 'z', (byte) '|'}; // magic for gzip-encoded content
static byte[] plainMagic = {(byte) 'p', (byte) '|'}; // magic for plain content (no encoding)

@@ -377,6 +377,9 @@ public final class MetadataRepository implements Iterable<byte[]> {
} catch (final IOException e) {
e.printStackTrace();
run = false;
} catch (final Exception e) {
e.printStackTrace();
run = false;
}
Log.logInfo("URLDBCLEANER", "UrldbCleaner-Thread stopped");
}
@@ -512,6 +515,9 @@ public final class MetadataRepository implements Iterable<byte[]> {
} catch (final IOException e) {
e.printStackTrace();
this.failure = e.getMessage();
} catch (final Exception e) {
e.printStackTrace();
this.failure = e.getMessage();
}
// terminate process
}

@@ -191,20 +191,24 @@ public class ReferenceOrder {
int p = this.start;
String dom;
Integer count;
while (p < this.end) {
iEntry = new WordReferenceVars(new WordReferenceRow(container.get(p++, false)));
this.decodedEntries.add(iEntry);
// find min/max
if (this.entryMin == null) this.entryMin = iEntry.clone(); else this.entryMin.min(iEntry);
if (this.entryMax == null) this.entryMax = iEntry.clone(); else this.entryMax.max(iEntry);
// update domcount
dom = iEntry.metadataHash().substring(6);
count = doms.get(dom);
if (count == null) {
doms.put(dom, int1);
} else {
doms.put(dom, Integer.valueOf(count.intValue() + 1));
try {
while (p < this.end) {
iEntry = new WordReferenceVars(new WordReferenceRow(container.get(p++, false)));
this.decodedEntries.add(iEntry);
// find min/max
if (this.entryMin == null) this.entryMin = iEntry.clone(); else this.entryMin.min(iEntry);
if (this.entryMax == null) this.entryMax = iEntry.clone(); else this.entryMax.max(iEntry);
// update domcount
dom = iEntry.metadataHash().substring(6);
count = doms.get(dom);
if (count == null) {
doms.put(dom, int1);
} else {
doms.put(dom, Integer.valueOf(count.intValue() + 1));
}
}
} catch (final Exception e) {
e.printStackTrace();
}
}

@@ -228,7 +228,11 @@ public final class plasmaSearchEvent {
// sort the local containers and truncate it to a limited count,
// so following sortings together with the global results will be fast
rankedCache.execQuery();
try {
rankedCache.execQuery();
} catch (final Exception e) {
e.printStackTrace();
}
}
}
@@ -508,39 +512,43 @@ public final class plasmaSearchEvent {
// start fetching urls and snippets
URLMetadataRow page;
final int fetchAhead = snippetMode == 0 ? 0 : 10;
while (System.currentTimeMillis() < this.timeout) {
this.lastLifeSign = System.currentTimeMillis();
// check if we have enough
if ((query.contentdom == plasmaSearchQuery.CONTENTDOM_IMAGE) && (images.size() >= query.neededResults() + fetchAhead)) break;
if ((query.contentdom != plasmaSearchQuery.CONTENTDOM_IMAGE) && (result.size() >= query.neededResults() + fetchAhead)) break;
// get next entry
page = rankedCache.bestURL(true);
if (page == null) {
if (!anyRemoteSearchAlive()) break; // we cannot expect more results
// if we did not get another entry, sleep some time and try again
try {Thread.sleep(100);} catch (final InterruptedException e1) {}
continue;
}
if (result.exists(page.hash().hashCode())) continue;
if (failedURLs.get(page.hash()) != null) continue;
// try secondary search
prepareSecondarySearch(); // will be executed only once
final ResultEntry resultEntry = obtainResultEntry(page, snippetMode);
if (resultEntry == null) continue; // the entry had some problems, cannot be used
urlRetrievalAllTime += resultEntry.dbRetrievalTime;
snippetComputationAllTime += resultEntry.snippetComputationTime;
//System.out.println("+++DEBUG-resultWorker+++ fetched " + resultEntry.urlstring());
// place the result to the result vector
if (!result.exists(resultEntry)) {
result.push(resultEntry, Long.valueOf(rankedCache.getOrder().cardinal(resultEntry.word())));
rankedCache.addReferences(resultEntry);
try {
while (System.currentTimeMillis() < this.timeout) {
this.lastLifeSign = System.currentTimeMillis();
// check if we have enough
if ((query.contentdom == plasmaSearchQuery.CONTENTDOM_IMAGE) && (images.size() >= query.neededResults() + fetchAhead)) break;
if ((query.contentdom != plasmaSearchQuery.CONTENTDOM_IMAGE) && (result.size() >= query.neededResults() + fetchAhead)) break;
// get next entry
page = rankedCache.bestURL(true);
if (page == null) {
if (!anyRemoteSearchAlive()) break; // we cannot expect more results
// if we did not get another entry, sleep some time and try again
try {Thread.sleep(100);} catch (final InterruptedException e1) {}
continue;
}
if (result.exists(page.hash().hashCode())) continue;
if (failedURLs.get(page.hash()) != null) continue;
// try secondary search
prepareSecondarySearch(); // will be executed only once
final ResultEntry resultEntry = obtainResultEntry(page, snippetMode);
if (resultEntry == null) continue; // the entry had some problems, cannot be used
urlRetrievalAllTime += resultEntry.dbRetrievalTime;
snippetComputationAllTime += resultEntry.snippetComputationTime;
//System.out.println("+++DEBUG-resultWorker+++ fetched " + resultEntry.urlstring());
// place the result to the result vector
if (!result.exists(resultEntry)) {
result.push(resultEntry, Long.valueOf(rankedCache.getOrder().cardinal(resultEntry.word())));
rankedCache.addReferences(resultEntry);
}
//System.out.println("DEBUG SNIPPET_LOADING: thread " + id + " got " + resultEntry.url());
}
//System.out.println("DEBUG SNIPPET_LOADING: thread " + id + " got " + resultEntry.url());
} catch (final Exception e) {
e.printStackTrace();
}
Log.logInfo("SEARCH", "resultWorker thread " + id + " terminated");
}

@@ -2238,6 +2238,8 @@ class delayedShutdown extends Thread {
Thread.sleep(delay);
} catch (final InterruptedException e) {
sb.getLog().logInfo("interrupted delayed shutdown");
} catch (final Exception e) {
e.printStackTrace();
}
this.sb.terminate();
}

@@ -706,7 +706,9 @@ public final class plasmaWordIndex {
}
}
}
} catch (IOException e) {
} catch (final IOException e) {
e.printStackTrace();
} catch (final Exception e) {
e.printStackTrace();
}
Log.logInfo("INDEXCLEANER", "IndexCleaner-Thread stopped");

@@ -620,6 +620,8 @@ public final class serverCore extends serverAbstractBusyThread implements server
listen();
} catch (final IOException e) {
System.err.println("ERROR: (internal) " + e);
} catch (final Exception e) {
e.printStackTrace();
} finally {
try {
if ((this.controlSocket != null) && (! this.controlSocket.isClosed())) {
@@ -925,7 +927,8 @@ public final class serverCore extends serverAbstractBusyThread implements server
try {
Thread.sleep(delay);
} catch (final InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (final Exception e) {
e.printStackTrace();
}

@@ -57,14 +57,18 @@ public class serverProfiling extends Thread {
}
public void run() {
while (running) {
update("memory", Long.valueOf(MemoryControl.used()), true);
try {
Thread.sleep(this.delaytime);
} catch (final InterruptedException e) {
this.running = false;
}
}
try {
while (running) {
update("memory", Long.valueOf(MemoryControl.used()), true);
try {
Thread.sleep(this.delaytime);
} catch (final InterruptedException e) {
this.running = false;
}
}
} catch (final Exception e) {
e.printStackTrace();
}
}
public static void update(final String eventName, final Object eventPayload, boolean useProtection) {

@@ -73,7 +73,11 @@ public class consoleInterface extends Thread
output.add(line);
}
dataIsRead.release();
} catch(final IOException ix) { log.logWarning("logpoint 6 " + ix.getMessage());}
} catch (final IOException ix) {
log.logWarning("logpoint 6 " + ix.getMessage());
} catch (final Exception e) {
e.printStackTrace();
}
}
/**

@@ -107,7 +107,9 @@ public class mediawikiIndex {
public void run() {
try {
createIndex(this.wikimediaxml);
} catch (IOException e) {
} catch (final IOException e) {
} catch (final Exception e) {
e.printStackTrace();
}
}
}

@@ -29,7 +29,6 @@ import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UTFDataFormatException;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

@@ -113,20 +113,24 @@ public class yacySearch extends Thread {
}
public void run() {
this.urls = yacyClient.search(
wordIndex.peers().mySeed(),
wordhashes, excludehashes, urlhashes, prefer, filter, language, count, maxDistance, global, partitions,
targetPeer, wordIndex, crawlResults, containerCache, abstractCache,
blacklist, rankingProfile, constraint);
if (urls != null) {
// urls is an array of url hashes. this is only used for log output
final StringBuilder urllist = new StringBuilder(this.urls.length * 13);
for (int i = 0; i < this.urls.length; i++) urllist.append(this.urls[i]).append(' ');
yacyCore.log.logInfo("REMOTE SEARCH - remote peer " + targetPeer.hash + ":" + targetPeer.getName() + " contributed " + urls.length + " links for word hash " + wordhashes + ": " + new String(urllist));
wordIndex.peers().mySeed().incRI(urls.length);
wordIndex.peers().mySeed().incRU(urls.length);
} else {
yacyCore.log.logInfo("REMOTE SEARCH - no answer from remote peer " + targetPeer.hash + ":" + targetPeer.getName());
try {
this.urls = yacyClient.search(
wordIndex.peers().mySeed(),
wordhashes, excludehashes, urlhashes, prefer, filter, language, count, maxDistance, global, partitions,
targetPeer, wordIndex, crawlResults, containerCache, abstractCache,
blacklist, rankingProfile, constraint);
if (urls != null) {
// urls is an array of url hashes. this is only used for log output
final StringBuilder urllist = new StringBuilder(this.urls.length * 13);
for (int i = 0; i < this.urls.length; i++) urllist.append(this.urls[i]).append(' ');
yacyCore.log.logInfo("REMOTE SEARCH - remote peer " + targetPeer.hash + ":" + targetPeer.getName() + " contributed " + urls.length + " links for word hash " + wordhashes + ": " + new String(urllist));
wordIndex.peers().mySeed().incRI(urls.length);
wordIndex.peers().mySeed().incRU(urls.length);
} else {
yacyCore.log.logInfo("REMOTE SEARCH - no answer from remote peer " + targetPeer.hash + ":" + targetPeer.getName());
}
} catch (final Exception e) {
e.printStackTrace();
}
}
