added more logging to EcoFS

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4661 6c8d7289-2bf4-0310-a012-ef5d649a1542
author: orbiter
parent: fb541f9162
commit: 6e36c156e8

@@ -34,6 +34,8 @@ import java.io.IOException;
 import java.io.RandomAccessFile;
 import java.util.Iterator;
 
+import de.anomic.server.logging.serverLog;
+
 /**
  * The EcoFS is a flat file with records of fixed length. The file does not contain
  * any meta information and the first record starts right at file position 0
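
Because an EcoFS file has no header, record i starts exactly at byte offset i * recordsize. A minimal sketch of that addressing, using a hypothetical helper that is not part of kelondroEcoFS:

    import java.io.IOException;
    import java.io.RandomAccessFile;

    // Sketch: random access into a header-less fixed-length-record file.
    // recordAt() is a hypothetical helper, not YaCy API.
    class FixedRecordSketch {
        static byte[] recordAt(RandomAccessFile raf, int recordsize, long index) throws IOException {
            byte[] record = new byte[recordsize];
            raf.seek(index * recordsize); // first record at offset 0, no meta information to skip
            raf.readFully(record);
            return record;
        }
    }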
@@ -528,8 +530,10 @@ public class kelondroEcoFS {
     public static class ChunkIterator implements Iterator<byte[]> {
 
-        private int recordsize, chunksize;
+        private int recordsize, chunksize, chunkcounter;
         private DataInputStream stream;
+        private serverLog log;
+        private File file;
 
         /**
          * create a ChunkIterator
@@ -540,12 +544,15 @@ public class kelondroEcoFS {
          * @param chunksize: the size of the chunks that are returned by next(). remaining bytes until the lenght of recordsize are skipped
          * @throws FileNotFoundException
          */
-        public ChunkIterator(File file, int recordsize, int chunksize) throws FileNotFoundException {
+        public ChunkIterator(File file, int recordsize, int chunksize, serverLog log) throws FileNotFoundException {
             assert (file.exists());
             assert file.length() % recordsize == 0;
             this.recordsize = recordsize;
             this.chunksize = chunksize;
+            this.chunkcounter = 0; // only for logging
             this.stream = new DataInputStream(new BufferedInputStream(new FileInputStream(file), 64 * 1024));
+            this.log = log;
+            this.file = file;
         }
 
         public boolean hasNext() {
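
The widened constructor lets callers pass their own serverLog, or null to fall back to the static serverLog call in the catch block below. A hedged usage sketch; the method name and record size are illustrative, not from this commit:

    import java.io.File;
    import java.io.FileNotFoundException;
    import java.util.Iterator;

    // Hypothetical caller of the new four-argument constructor.
    class ChunkIteratorDemo {
        static void scan(File tablefile, int recordsize) throws FileNotFoundException {
            // passing null selects the static serverLog.logWarning fallback
            Iterator<byte[]> ri = new kelondroEcoFS.ChunkIterator(tablefile, recordsize, recordsize, null);
            while (ri.hasNext()) {
                byte[] chunk = ri.next();
                if (chunk == null) break; // next() returns null once an IOException ends the iteration
                // ... process one record ...
            }
        }
    }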
@@ -570,7 +577,11 @@ public class kelondroEcoFS {
                 }
                 return chunk;
             } catch (IOException e) {
-                e.printStackTrace();
+                if (log == null) {
+                    serverLog.logWarning("kelondroEcoFS", "ChunkIterator for file " + file.toString() + " ended with " + e.getCause().getMessage() + " at chunk " + this.chunkcounter, e);
+                } else {
+                    log.logWarning("ChunkIterator for file " + file.toString() + " ended with " + e.getCause().getMessage() + " at chunk " + this.chunkcounter, e);
+                }
                 this.stream = null;
                 return null;
             }
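
One caveat in the new logging path: IOException.getCause() can be null, in which case e.getCause().getMessage() throws a NullPointerException inside the catch block. A null-safe variant of the message extraction, as a sketch rather than what this commit does:

    import java.io.IOException;

    // Hypothetical helper: prefer the cause's message, fall back to the exception's own.
    class LogMessage {
        static String reason(IOException e) {
            Throwable cause = e.getCause();
            return (cause == null) ? e.getMessage() : cause.getMessage();
        }
    }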

@@ -139,7 +139,7 @@ public class kelondroEcoTable implements kelondroIndex {
             } else {
                 byte[] record;
                 key = new byte[rowdef.primaryKeyLength];
-                Iterator<byte[]> ri = new kelondroEcoFS.ChunkIterator(tablefile, rowdef.objectsize, rowdef.objectsize);
+                Iterator<byte[]> ri = new kelondroEcoFS.ChunkIterator(tablefile, rowdef.objectsize, rowdef.objectsize, null);
                 while (ri.hasNext()) {
                     record = ri.next();
                     assert record != null;
@@ -217,7 +217,7 @@ public class kelondroEcoTable implements kelondroIndex {
      */
     public Iterator<byte[]> keyIterator(File file, kelondroRow rowdef) throws FileNotFoundException {
         assert rowdef.primaryKeyIndex == 0;
-        return new kelondroEcoFS.ChunkIterator(file, rowdef.objectsize, rowdef.primaryKeyLength);
+        return new kelondroEcoFS.ChunkIterator(file, rowdef.objectsize, rowdef.primaryKeyLength, null);
     }
 
     public static long tableSize(File tablefile, int recordsize) {
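
keyIterator is where the recordsize/chunksize split pays off: per the @param note above, each next() returns only the first primaryKeyLength bytes of a record and skips the rest. A sketch of the implied read-then-skip step, assumed from that comment rather than copied from the class:

    import java.io.DataInputStream;
    import java.io.IOException;

    // Sketch: read the key prefix of one record, skip the remaining payload.
    class KeyReadSketch {
        static byte[] nextKey(DataInputStream stream, int recordsize, int chunksize) throws IOException {
            byte[] chunk = new byte[chunksize];
            stream.readFully(chunk);                  // the chunk returned by next()
            stream.skipBytes(recordsize - chunksize); // remaining bytes up to recordsize are skipped
            return chunk;
        }
    }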
