Merge branch 'yacy:master' into master

pull/678/head
smokingwheels authored 3 months ago, committed by GitHub
commit 4967b2e0e8

@ -30,9 +30,7 @@ HTTPC.level = INFO
DHT-OUT.level = INFO
SWITCHBOARD.level = INFO
DHT.level = INFO
HeapReader.level = INFO
Heap.level = INFO
KELONDRO.level = INFO
# UPnP related
UPNP.level = INFO
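
The yacy.logging hunk above drops the per-class HeapReader and Heap log levels in favor of a single KELONDRO level; the code hunks that follow make the matching change, routing every kelondro log call through the shared "KELONDRO" appender and moving the former class name into the message text. A minimal, hypothetical sketch of the before/after call pattern, assuming YaCy's ConcurrentLog class (net.yacy.cora.util.ConcurrentLog in the source tree; the file name and message strings below are illustrative):

import net.yacy.cora.util.ConcurrentLog; // assumed import path, not shown in this diff

public class KelondroLogSketch {

    // before: one logger per class, named after that class
    private final static ConcurrentLog oldLog = new ConcurrentLog("HeapReader");

    // after: one shared logger for the whole kelondro package
    private final static ConcurrentLog newLog = new ConcurrentLog("KELONDRO");

    public static void main(final String[] args) {
        // static form, before and after (as in the ArrayStack hunks below)
        ConcurrentLog.warn("ArrayStack", "cannot read file example.blob");
        ConcurrentLog.warn("KELONDRO", "ArrayStack: cannot read file example.blob");

        // instance form, before and after (as in the HeapReader hunks below)
        oldLog.info("using a dump of the index of example.blob.");
        newLog.info("HeapReader: using a dump of the index of example.blob.");
    }
}

With that consolidation, the single KELONDRO.level entry above controls the verbosity of all of these classes at once.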

@ -212,7 +212,7 @@ public class ArrayStack implements BLOB {
sortedItems.put(Long.valueOf(time), new blobItem(d, f, oneBlob));
} catch (final IOException e) {
if (deleteonfail) {
ConcurrentLog.warn("ArrayStack", "cannot read file " + f.getName() + ", deleting it (smart fail; alternative would be: crash; required user action would be same as deletion)");
ConcurrentLog.warn("KELONDRO", "ArrayStack: cannot read file " + f.getName() + ", deleting it (smart fail; alternative would be: crash; required user action would be same as deletion)");
f.delete();
} else {
throw new IOException(e.getMessage(), e);
@ -279,7 +279,7 @@ public class ArrayStack implements BLOB {
return;
}
}
ConcurrentLog.severe("BLOBArray", "file " + location + " cannot be unmounted. The file " + ((location.exists()) ? "exists." : "does not exist."));
ConcurrentLog.severe("KELONDRO", "BLOBArray: file " + location + " cannot be unmounted. The file " + ((location.exists()) ? "exists." : "does not exist."));
}
private File unmount(final int idx) {
@ -624,7 +624,7 @@ public class ArrayStack implements BLOB {
} catch (final InterruptedException e) {
Thread.currentThread().interrupt();
} catch (final ExecutionException e) {
ConcurrentLog.severe("ArrayStack", "", e);
ConcurrentLog.severe("KELONDRO", "ArrayStack", e);
throw new RuntimeException(e.getCause());
}
//System.out.println("*DEBUG SplitTable fail.time = " + (System.currentTimeMillis() - start) + " ms");
@ -701,10 +701,10 @@ public class ArrayStack implements BLOB {
final byte[] n = b.get(this.key);
if (n != null) return n;
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "BlobValues - IOException: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: BlobValues - IOException: " + e.getMessage(), e);
return null;
} catch (final SpaceExceededException e) {
ConcurrentLog.severe("ArrayStack", "BlobValues - RowSpaceExceededException: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: BlobValues - RowSpaceExceededException: " + e.getMessage(), e);
break;
}
}
@ -758,7 +758,7 @@ public class ArrayStack implements BLOB {
final long l = b.length(this.key);
if (l >= 0) return Long.valueOf(l);
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "", e);
ConcurrentLog.severe("KELONDRO", "ArrayStack", e);
return null;
}
}
@ -891,10 +891,10 @@ public class ArrayStack implements BLOB {
try {
final boolean terminated = DELETE_EXECUTOR.awaitTermination(timeout, TimeUnit.SECONDS);
if(!terminated) {
ConcurrentLog.warn("ArrayStack", "Delete executor service could not terminated within " + timeout + " second");
ConcurrentLog.warn("KELONDRO", "ArrayStack: Delete executor service could not terminated within " + timeout + " second");
}
} catch (InterruptedException e) {
ConcurrentLog.warn("ArrayStack", "Interrupted before termination of the delete executor service");
ConcurrentLog.warn("KELONDRO", "ArrayStack: Interrupted before termination of the delete executor service");
}
}
@ -925,34 +925,34 @@ public class ArrayStack implements BLOB {
final File newFile, final int writeBuffer) {
if (f2 == null) {
// this is a rewrite
ConcurrentLog.info("BLOBArray", "rewrite of " + f1.getName());
ConcurrentLog.info("KELONDRO", "BLOBArray: rewrite of " + f1.getName());
final File resultFile = rewriteWorker(factory, this.keylength, this.ordering, f1, newFile, writeBuffer);
if (resultFile == null) {
ConcurrentLog.warn("BLOBArray", "rewrite of file " + f1 + " returned null. newFile = " + newFile);
ConcurrentLog.warn("KELONDRO", "BLOBArray: rewrite of file " + f1 + " returned null. newFile = " + newFile);
return null;
}
try {
mountBLOB(resultFile, false);
} catch (final IOException e) {
ConcurrentLog.warn("BLOBArray", "rewrite of file " + f1 + " successfull, but read failed. resultFile = " + resultFile);
ConcurrentLog.warn("KELONDRO", "BLOBArray: rewrite of file " + f1 + " successfull, but read failed. resultFile = " + resultFile);
return null;
}
ConcurrentLog.info("BLOBArray", "rewrite of " + f1.getName() + " into " + resultFile);
ConcurrentLog.info("KELONDRO", "BLOBArray: rewrite of " + f1.getName() + " into " + resultFile);
return resultFile;
}
ConcurrentLog.info("BLOBArray", "merging " + f1.getName() + " with " + f2.getName());
ConcurrentLog.info("KELONDRO", "BLOBArray: merging " + f1.getName() + " with " + f2.getName());
final File resultFile = mergeWorker(factory, this.keylength, this.ordering, f1, f2, newFile, writeBuffer);
if (resultFile == null) {
ConcurrentLog.warn("BLOBArray", "merge of files " + f1 + ", " + f2 + " returned null. newFile = " + newFile);
ConcurrentLog.warn("KELONDRO", "BLOBArray: merge of files " + f1 + ", " + f2 + " returned null. newFile = " + newFile);
return null;
}
try {
mountBLOB(resultFile, false);
} catch (final IOException e) {
ConcurrentLog.warn("BLOBArray", "merge of files " + f1 + ", " + f2 + " successfull, but read failed. resultFile = " + resultFile);
ConcurrentLog.warn("KELONDRO", "BLOBArray: merge of files " + f1 + ", " + f2 + " successfull, but read failed. resultFile = " + resultFile);
return null;
}
ConcurrentLog.info("BLOBArray", "merged " + f1.getName() + " with " + f2.getName() + " into " + resultFile);
ConcurrentLog.info("KELONDRO", "BLOBArray: merged " + f1.getName() + " with " + f2.getName() + " into " + resultFile);
return resultFile;
}
@ -988,12 +988,12 @@ public class ArrayStack implements BLOB {
merge(i1, i2, order, writer);
writer.close(true);
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "cannot writing or close writing merge, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot writing or close writing merge, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
HeapWriter.delete(tmpFile);
HeapWriter.delete(newFile);
return null;
} catch (final SpaceExceededException e) {
ConcurrentLog.severe("ArrayStack", "cannot merge because of memory failure: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because of memory failure: " + e.getMessage(), e);
HeapWriter.delete(tmpFile);
HeapWriter.delete(newFile);
return null;
@ -1003,13 +1003,13 @@ public class ArrayStack implements BLOB {
HeapWriter.delete(f2);
return newFile;
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "cannot merge because input files cannot be read, f2 = " + f2.toString() + ": " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because input files cannot be read, f2 = " + f2.toString() + ": " + e.getMessage(), e);
return null;
} finally {
if (i2 != null) i2.close();
}
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "cannot merge because input files cannot be read, f1 = " + f1.toString() + ": " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because input files cannot be read, f1 = " + f1.toString() + ": " + e.getMessage(), e);
return null;
} finally {
if (i1 != null) i1.close();
@ -1025,7 +1025,7 @@ public class ArrayStack implements BLOB {
try {
i = new ReferenceIterator<ReferenceType>(f, factory);
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "cannot rewrite because input file cannot be read, f = " + f.toString() + ": " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot rewrite because input file cannot be read, f = " + f.toString() + ": " + e.getMessage(), e);
return null;
}
if (!i.hasNext()) {
@ -1040,12 +1040,12 @@ public class ArrayStack implements BLOB {
writer.close(true);
i.close();
} catch (final IOException e) {
ConcurrentLog.severe("ArrayStack", "cannot writing or close writing rewrite, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot writing or close writing rewrite, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
FileUtils.deletedelete(tmpFile);
FileUtils.deletedelete(newFile);
return null;
} catch (final SpaceExceededException e) {
ConcurrentLog.severe("ArrayStack", "cannot rewrite because of memory failure: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot rewrite because of memory failure: " + e.getMessage(), e);
FileUtils.deletedelete(tmpFile);
FileUtils.deletedelete(newFile);
return null;
@ -1072,7 +1072,7 @@ public class ArrayStack implements BLOB {
e = ordering.compare(c1.getTermHash(), c2.getTermHash());
if (e < 0) {
s = c1.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
writer.add(c1.getTermHash(), c1.exportCollection());
if (i1.hasNext()) {
c1lh = c1.getTermHash();
@ -1085,7 +1085,7 @@ public class ArrayStack implements BLOB {
}
if (e > 0) {
s = c2.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
writer.add(c2.getTermHash(), c2.exportCollection());
if (i2.hasNext()) {
c2lh = c2.getTermHash();
@ -1100,7 +1100,7 @@ public class ArrayStack implements BLOB {
// merge the entries
c1 = c1.merge(c2);
s = c1.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
writer.add(c1.getTermHash(), c1.exportCollection());
c1lh = c1.getTermHash();
c2lh = c2.getTermHash();
@ -1130,7 +1130,7 @@ public class ArrayStack implements BLOB {
while (c1 != null) {
//System.out.println("FLUSH REMAINING 1: " + c1.getWordHash());
s = c1.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
writer.add(c1.getTermHash(), c1.exportCollection());
if (i1.hasNext()) {
c1lh = c1.getTermHash();
@ -1143,7 +1143,7 @@ public class ArrayStack implements BLOB {
while (c2 != null) {
//System.out.println("FLUSH REMAINING 2: " + c2.getWordHash());
s = c2.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
writer.add(c2.getTermHash(), c2.exportCollection());
if (i2.hasNext()) {
c2lh = c2.getTermHash();
@ -1167,7 +1167,7 @@ public class ArrayStack implements BLOB {
while (true) {
assert c != null;
s = c.shrinkReferences();
if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c.getTermHash()) + " by " + s + " to " + c.size() + " entries");
if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c.getTermHash()) + " by " + s + " to " + c.size() + " entries");
writer.add(c.getTermHash(), c.exportCollection());
if (i.hasNext()) {
clh = c.getTermHash();

@ -99,7 +99,7 @@ public class BEncodedHeap implements MapStore {
try {
return this.table.keys(true, false);
} catch (final IOException e) {
ConcurrentLog.severe("BEncodedHeap", "returning empty iterator for failed key iteration: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "BEncodedHeap: returning empty iterator for failed key iteration: " + e.getMessage(), e);
return new CloneableIterator<byte[]>(){
@Override
@ -620,7 +620,7 @@ public class BEncodedHeap implements MapStore {
this.table = new Heap(location, keylen, order, buffermax);
return iter;
} catch (final IOException e ) {
ConcurrentLog.severe("PropertiesTable", e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "PropertiesTable " + e.getMessage(), e);
return null;
}
}

@ -97,7 +97,7 @@ public class BEncodedHeapBag extends AbstractMapStore implements MapStore {
try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(element.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime();
} catch (final ParseException e) {
ConcurrentLog.severe("BEncodedHeapBag", "", e);
ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag", e);
continue;
}
time = d.getTime();
@ -134,11 +134,11 @@ public class BEncodedHeapBag extends AbstractMapStore implements MapStore {
t.remove(maxf);
f = new File(this.baseDir, maxf);
try {
ConcurrentLog.info("BEncodedHeapBag", "opening partial heap " + f);
ConcurrentLog.info("KELONDRO", "BEncodedHeapBag: opening partial heap " + f);
BEncodedHeap heap = new BEncodedHeap(f, this.keylength, this.entryOrder, this.buffermax);
this.bag.put(maxf, heap);
} catch (final IOException e) {
ConcurrentLog.severe("BEncodedHeapBag", "error opening partial heap " + f);
ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag: error opening partial heap " + f);
}
}
}
@ -187,7 +187,7 @@ public class BEncodedHeapBag extends AbstractMapStore implements MapStore {
try {
heap = new BEncodedHeap(f, this.keylength, this.entryOrder, this.buffermax);
} catch (final IOException e) {
ConcurrentLog.severe("BEncodedHeapBag", "unable to open new heap file: " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag: unable to open new heap file: " + e.getMessage(), e);
return null;
}
this.bag.put(this.current, heap);
@ -205,7 +205,7 @@ public class BEncodedHeapBag extends AbstractMapStore implements MapStore {
try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(name.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime().getTime();
} catch (final ParseException e) {
ConcurrentLog.severe("BEncodedHeapBag", "", e);
ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag", e);
d = 0;
}
if (d + this.fileAgeLimit < t || new File(this.baseDir, name).length() >= this.fileSizeLimit) {
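
Several hunks in this commit (BEncodedHeapBag above, SplitTable further down) touch the same helper pattern: the age of a partial heap or table file is recovered from a fixed-width timestamp embedded in its file name, and unparseable names are logged and treated as age 0. A small stand-alone sketch of that parsing, assuming the SHORT_MILSEC format is the 17-digit yyyyMMddHHmmssSSS pattern (an assumption consistent with the 17-character substring in the diff; the prefix and file name below are made up):

import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;

public class HeapFileNameSketch {

    // assumption: SHORT_MILSEC corresponds to yyyyMMddHHmmssSSS (17 digits)
    private static final SimpleDateFormat SHORT_MILSEC = new SimpleDateFormat("yyyyMMddHHmmssSSS");

    public static long fileTime(final String prefix, final String fileName) {
        try {
            // same slice as in the diff: the 17 characters after "<prefix>."
            final String stamp = fileName.substring(prefix.length() + 1, prefix.length() + 18);
            return SHORT_MILSEC.parse(stamp).getTime();
        } catch (ParseException | IndexOutOfBoundsException e) {
            // mirrors the diff: unparseable names are not fatal, the file is treated as age 0
            return 0L;
        }
    }

    public static void main(final String[] args) {
        // hypothetical file name: <prefix>.<timestamp>.heap
        System.out.println(new Date(fileTime("queues", "queues.20240101120000000.heap")));
    }
}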

@ -146,7 +146,7 @@ public class BEncodedHeapShard extends AbstractMapStore implements MapStore {
// open all tables of this shard
for (final String element : tablefile) {
if (this.shardMethod.isShardPart(element)) {
ConcurrentLog.info("BEncodedHeapShard", "opening partial shard " + element);
ConcurrentLog.info("KELONDRO", "BEncodedHeapShard: opening partial shard " + element);
MapStore bag = openBag(element);
this.shard.put(this.shardMethod.getShardName(element), bag);
}

@ -158,7 +158,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
baos.close();
return baos.toByteArray();
} catch (final IOException e) {
ConcurrentLog.severe("Compressor", "", e);
ConcurrentLog.severe("KELONDRO", "Compressor", e);
return null;
}
}
@ -219,7 +219,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
locked = this.lock.tryLock(this.lockTimeout, TimeUnit.MILLISECONDS);
} catch (InterruptedException ignored) {
/* When interrupted, simply return null */
ConcurrentLog.fine("Compressor", "Interrupted while acquiring a synchronzation lock on get()");
ConcurrentLog.fine("KELONDRO", "Compressor: Interrupted while acquiring a synchronzation lock on get()");
}
if(locked) {
try {
@ -241,7 +241,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
}
return decompress(b);
}
ConcurrentLog.fine("Compressor", "Could not acquire a synchronization lock for retrieval within " + this.lockTimeout + " milliseconds");
ConcurrentLog.fine("KELONDRO", "Compressor: Could not acquire a synchronization lock for retrieval within " + this.lockTimeout + " milliseconds");
return b;
}
@ -318,7 +318,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
locked = this.lock.tryLock(this.lockTimeout, TimeUnit.MILLISECONDS);
} catch (InterruptedException ignored) {
/* When interrupted, simply nothing is inserted */
ConcurrentLog.fine("Compressor", "Interrupted while acquiring a synchronzation lock on insert()");
ConcurrentLog.fine("KELONDRO", "Compressor: Interrupted while acquiring a synchronzation lock on insert()");
}
if(locked) {
try {
@ -349,7 +349,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
flushAll();
}
} else {
ConcurrentLog.fine("Compressor", "Could not acquire a synchronization lock for insertion within " + this.lockTimeout + " milliseconds");
ConcurrentLog.fine("KELONDRO", "Compressor: Could not acquire a synchronization lock for insertion within " + this.lockTimeout + " milliseconds");
}
}
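
The Compressor hunks above change only the log channel, but they sit inside a tryLock-with-timeout pattern that the messages refer to: the lock is awaited for at most lockTimeout milliseconds, an interruption is merely logged at FINE level, and a failed acquisition falls through without entering the critical section. A rough stand-alone sketch of that pattern using plain java.util.concurrent (the class, method, and timeout value are illustrative, not YaCy code):

import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;

public class TryLockSketch {

    private final ReentrantLock lock = new ReentrantLock();
    private final long lockTimeout = 2000L; // milliseconds, illustrative value

    public byte[] get(final byte[] key) {
        boolean locked = false;
        try {
            // wait at most lockTimeout ms instead of blocking indefinitely
            locked = this.lock.tryLock(this.lockTimeout, TimeUnit.MILLISECONDS);
        } catch (final InterruptedException ignored) {
            // interruption: give up on the lock and restore the interrupt flag
            Thread.currentThread().interrupt();
        }
        if (locked) {
            try {
                return lookup(key); // critical section
            } finally {
                this.lock.unlock(); // always release, even if lookup throws
            }
        }
        // could not acquire the lock within the timeout: skip the operation
        return null;
    }

    private byte[] lookup(final byte[] key) {
        return key; // placeholder for the real lookup
    }
}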

@ -89,7 +89,7 @@ public final class Heap extends HeapModifier implements BLOB {
this.buffermax = buffermax;
this.buffer = new TreeMap<byte[], byte[]>(ordering);
this.buffersize = 0;
ConcurrentLog.info("Heap", "initializing heap " + this.name());
ConcurrentLog.info("KELONDRO", "Heap: initializing heap " + this.name());
/*
// DEBUG
Iterator<byte[]> i = index.keys(true, null);
@ -279,7 +279,7 @@ public final class Heap extends HeapModifier implements BLOB {
*/
@Override
public synchronized void clear() throws IOException {
ConcurrentLog.info("Heap", "clearing heap " + this.name());
ConcurrentLog.info("KELONDRO", "Heap: clearing heap " + this.name());
assert this.buffer != null;
if (this.buffer == null) this.buffer = new TreeMap<byte[], byte[]>(this.ordering);
this.buffer.clear();
@ -292,7 +292,7 @@ public final class Heap extends HeapModifier implements BLOB {
*/
@Override
public synchronized void close(final boolean writeIDX) {
ConcurrentLog.info("Heap", "closing heap " + this.name());
ConcurrentLog.info("KELONDRO", "Heap: closing heap " + this.name());
if (this.file != null && this.buffer != null) {
try {
flushBuffer();

@ -115,8 +115,8 @@ public class HeapModifier extends HeapReader implements BLOB {
//assert seek + size + 4 <= this.file.length() : heapFile.getName() + ": too long size " + size + " in record at " + seek;
long filelength = this.file.length(); // put in separate variable for debugging
if (seek + size + 4 > filelength) {
ConcurrentLog.severe("BLOBHeap", this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
throw new IOException(this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
ConcurrentLog.severe("KELONDRO", "BLOBHeap: " + this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
throw new IOException("BLOBHeap: " + this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
}
super.deleteFingerprint();

@ -58,7 +58,7 @@ public class HeapReader {
//public final static long keepFreeMem = 20 * 1024 * 1024;
private final static ConcurrentLog log = new ConcurrentLog("HeapReader");
private final static ConcurrentLog log = new ConcurrentLog("KELONDRO");
// input values
protected int keylength; // the length of the primary key
@ -107,10 +107,10 @@ public class HeapReader {
}
}
if (!ok) {
log.warn("verification of idx file for " + heapFile.toString() + " failed, re-building index");
log.warn("HeapReader: verification of idx file for " + heapFile.toString() + " failed, re-building index");
initIndexReadFromHeap();
} else {
log.info("using a dump of the index of " + heapFile.toString() + ".");
log.info("HeapReader: using a dump of the index of " + heapFile.toString() + ".");
}
} else {
// if we did not have a dump, create a new index
@ -161,7 +161,7 @@ public class HeapReader {
// if this is successful, return true; otherwise false
String fingerprint = fingerprintFileHash(this.heapFile);
if (fingerprint == null) {
log.severe("cannot generate a fingerprint for " + this.heapFile + ": null");
log.severe("HeapReader: cannot generate a fingerprint for " + this.heapFile + ": null");
return false;
}
this.fingerprintFileIdx = HeapWriter.fingerprintIndexFile(this.heapFile, fingerprint);
@ -188,7 +188,7 @@ public class HeapReader {
// check saturation
if (this.index instanceof RowHandleMap) {
int[] saturation = ((RowHandleMap) this.index).saturation(); // {<the maximum length of consecutive equal-beginning bytes in the key>, <the minimum number of leading zeros in the second column>}
log.info("saturation of " + this.fingerprintFileIdx.getName() + ": keylength = " + saturation[0] + ", vallength = " + saturation[1] + ", size = " + this.index.size() +
log.info("HeapReader: saturation of " + this.fingerprintFileIdx.getName() + ": keylength = " + saturation[0] + ", vallength = " + saturation[1] + ", size = " + this.index.size() +
", maximum saving for index-compression = " + (saturation[0] * this.index.size() / 1024 / 1024) + " MB" +
", exact saving for value-compression = " + (saturation[1] * this.index.size() / 1024 / 1024) + " MB");
}
@ -249,7 +249,7 @@ public class HeapReader {
private void initIndexReadFromHeap() throws IOException {
// this initializes the this.index object by reading positions from the heap file
log.info("generating index for " + this.heapFile.toString() + ", " + (this.file.length() / 1024 / 1024) + " MB. Please wait.");
log.info("HeapReader: generating index for " + this.heapFile.toString() + ", " + (this.file.length() / 1024 / 1024) + " MB. Please wait.");
this.free = new Gap();
RowHandleMap.initDataConsumer indexready = RowHandleMap.asynchronusInitializer(this.name() + ".initializer", this.keylength, this.ordering, 8, Math.max(10, (int) (Runtime.getRuntime().freeMemory() / (10 * 1024 * 1024))));
@ -268,7 +268,7 @@ public class HeapReader {
//assert reclen > 0 : " reclen == 0 at seek pos " + seek;
if (reclen == 0) {
// very bad file inconsistency
log.severe("reclen == 0 at seek pos " + seek + " in file " + this.heapFile);
log.severe("HeapReader: reclen == 0 at seek pos " + seek + " in file " + this.heapFile);
this.file.setLength(seek); // delete everything else at the remaining of the file :-(
break loop;
}
@ -295,7 +295,7 @@ public class HeapReader {
this.file.seek(seek + 4);
Arrays.fill(key, (byte) 0);
this.file.write(key); // mark the place as empty record
log.warn("BLOB " + this.heapFile.getName() + ": skiped not wellformed key " + UTF8.String(key) + " at seek pos " + seek);
log.warn("HeapReader: BLOB " + this.heapFile.getName() + ": skiped not wellformed key " + UTF8.String(key) + " at seek pos " + seek);
}
}
// new seek position
@ -312,7 +312,7 @@ public class HeapReader {
} catch (final ExecutionException e) {
ConcurrentLog.logException(e);
}
log.info("finished index generation for " + this.heapFile.toString() + ", " + this.index.size() + " entries, " + this.free.size() + " gaps.");
log.info("HeapReader: finished index generation for " + this.heapFile.toString() + ", " + this.index.size() + " entries, " + this.free.size() + " gaps.");
}
private void mergeFreeEntries() throws IOException {
@ -340,7 +340,7 @@ public class HeapReader {
lastFree = nextFree;
}
}
log.info("BLOB " + this.heapFile.toString() + ": merged " + merged + " free records");
log.info("HeapReader: BLOB " + this.heapFile.toString() + ": merged " + merged + " free records");
if (merged > 0) deleteFingerprint();
}
}
@ -360,7 +360,7 @@ public class HeapReader {
public int size() {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in size(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in size(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return 0;
}
return (this.index == null) ? 0 : this.index.size();
@ -369,7 +369,7 @@ public class HeapReader {
public boolean isEmpty() {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in isEmpty(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in isEmpty(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return true;
}
return this.index.isEmpty();
@ -383,7 +383,7 @@ public class HeapReader {
public boolean containsKey(byte[] key) {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in containsKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in containsKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return false;
}
key = normalizeKey(key);
@ -408,7 +408,7 @@ public class HeapReader {
protected synchronized byte[] firstKey() throws IOException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in firstKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in firstKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
synchronized (this.index) {
@ -426,7 +426,7 @@ public class HeapReader {
protected byte[] first() throws IOException, SpaceExceededException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in first(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in first(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
synchronized (this.index) {
@ -446,7 +446,7 @@ public class HeapReader {
protected byte[] lastKey() throws IOException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in lastKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in lastKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
if (this.index == null) return null;
@ -465,7 +465,7 @@ public class HeapReader {
protected byte[] last() throws IOException, SpaceExceededException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in last(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in last(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
synchronized (this.index) {
@ -484,7 +484,7 @@ public class HeapReader {
public byte[] get(byte[] key) throws IOException, SpaceExceededException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in get(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in get(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
key = normalizeKey(key);
@ -499,7 +499,7 @@ public class HeapReader {
final int len = this.file.readInt() - this.keylength;
if (len < 0) {
// database file may be corrupted and should be deleted :-((
log.severe("file " + this.file.file() + " corrupted at " + pos + ": negative len. len = " + len + ", pk.len = " + this.keylength);
log.severe("HeapReader: file " + this.file.file() + " corrupted at " + pos + ": negative len. len = " + len + ", pk.len = " + this.keylength);
// to get lazy over that problem (who wants to tell the user to stop operation and delete the file???) we work on like the entry does not exist
this.index.remove(key);
return null;
@ -519,7 +519,7 @@ public class HeapReader {
this.file.readFully(keyf, 0, keyf.length);
if (!this.ordering.equal(key, keyf)) {
// verification of the indexed access failed. we must re-read the index
log.severe("indexed verification access failed for " + this.heapFile.toString());
log.severe("HeapReader: indexed verification access failed for " + this.heapFile.toString());
// this is a severe operation, it should never happen.
// remove entry from index because keeping that element in the index would not make sense
this.index.remove(key);
@ -581,7 +581,7 @@ public class HeapReader {
public long length(byte[] key) throws IOException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return 0;
}
key = normalizeKey(key);
@ -617,17 +617,17 @@ public class HeapReader {
try {
String fingerprint = fingerprintFileHash(this.heapFile);
if (fingerprint == null) {
log.severe("cannot write a dump for " + this.heapFile.getName()+ ": fingerprint is null");
log.severe("HeapReader: cannot write a dump for " + this.heapFile.getName()+ ": fingerprint is null");
} else {
File newFingerprintFileGap = HeapWriter.fingerprintGapFile(this.heapFile, fingerprint);
if (this.fingerprintFileGap != null &&
this.fingerprintFileGap.getName().equals(newFingerprintFileGap.getName()) &&
this.fingerprintFileGap.exists()) {
log.info("using existing gap dump instead of writing a new one: " + this.fingerprintFileGap.getName());
log.info("HeapReader: using existing gap dump instead of writing a new one: " + this.fingerprintFileGap.getName());
} else {
long start = System.currentTimeMillis();
this.free.dump(newFingerprintFileGap);
log.info("wrote a dump for the " + this.free.size() + " gap entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
log.info("HeapReader: wrote a dump for the " + this.free.size() + " gap entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
}
}
this.free.clear();
@ -637,11 +637,11 @@ public class HeapReader {
if (this.fingerprintFileIdx != null &&
this.fingerprintFileIdx.getName().equals(newFingerprintFileIdx.getName()) &&
this.fingerprintFileIdx.exists()) {
log.info("using existing idx dump instead of writing a new one: " + this.fingerprintFileIdx.getName());
log.info("HeapReader: using existing idx dump instead of writing a new one: " + this.fingerprintFileIdx.getName());
} else {
long start = System.currentTimeMillis();
this.index.dump(newFingerprintFileIdx);
log.info("wrote a dump for the " + this.index.size() + " index entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
log.info("HeapReader: wrote a dump for the " + this.index.size() + " index entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
}
}
this.index.close();
@ -656,7 +656,9 @@ public class HeapReader {
this.index = null;
this.closeDate = new Date();
} catch (Throwable e) {ConcurrentLog.logException(e);}
log.info("close HeapFile " + this.heapFile.getName() + "; trace: " + ConcurrentLog.stackTrace());
log.info("HeapReader: close HeapFile " + this.heapFile.getName());
log.fine("trace: " + ConcurrentLog.stackTrace());
}
}
@ -681,7 +683,7 @@ public class HeapReader {
*/
public CloneableIterator<byte[]> keys(final boolean up, final boolean rotating) throws IOException {
if (this.index == null) {
log.severe("this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
synchronized (this.index) {
@ -699,7 +701,7 @@ public class HeapReader {
public CloneableIterator<byte[]> keys(final boolean up, final byte[] firstKey) throws IOException {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return null;
}
synchronized (this.index) {
@ -710,7 +712,7 @@ public class HeapReader {
public long length() {
assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
if (this.index == null) {
log.severe("this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
log.severe("HeapReader: this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
return 0;
}
synchronized (this.index) {
@ -780,7 +782,7 @@ public class HeapReader {
// read some more bytes to consume the empty record
if (len > 1) {
if (len - 1 != this.is.skipBytes(len - 1)) { // all that is remaining
log.warn("problem skiping " + + len + " bytes in " + this.blobFile.getName());
log.warn("HeapReader: problem skiping " + + len + " bytes in " + this.blobFile.getName());
try {this.is.close();} catch (final IOException e) {}
return null;
}
@ -811,7 +813,7 @@ public class HeapReader {
// the allocation of memory for the payload may fail
// this is bad because we must interrupt the iteration here but the
// process that uses the iteration may think that the iteraton has just been completed
log.severe("out of memory in LookAheadIterator.next0 for file " + this.blobFile.toString(), ee);
log.severe("HeapReader: out of memory in LookAheadIterator.next0 for file " + this.blobFile.toString(), ee);
try {this.is.close();} catch (final IOException e) {}
return null;
}

@ -41,7 +41,7 @@ import net.yacy.kelondro.util.FileUtils;
public final class HeapWriter {
private final static ConcurrentLog log = new ConcurrentLog("HeapWriter");
private final static ConcurrentLog log = new ConcurrentLog("KELONDRO");
public final static byte[] ZERO = new byte[]{0};
private final int keylength; // the length of the primary key
@ -95,7 +95,7 @@ public final class HeapWriter {
try {
fileStream.close();
} catch(IOException ignored) {
log.warn("Could not close output stream on file " + temporaryHeapFile);
log.warn("HeapWriter: Could not close output stream on file " + temporaryHeapFile);
}
throw e;
}
@ -147,9 +147,9 @@ public final class HeapWriter {
// rename the file into final name
if (this.heapFileREADY.exists()) FileUtils.deletedelete(this.heapFileREADY);
boolean renameok = this.heapFileTMP.renameTo(this.heapFileREADY);
if (!renameok) throw new IOException("cannot rename " + this.heapFileTMP + " to " + this.heapFileREADY);
if (!this.heapFileREADY.exists()) throw new IOException("renaming of " + this.heapFileREADY.toString() + " failed: files still exists");
if (this.heapFileTMP.exists()) throw new IOException("renaming to " + this.heapFileTMP.toString() + " failed: file does not exist");
if (!renameok) throw new IOException("HeapWriter: cannot rename " + this.heapFileTMP + " to " + this.heapFileREADY);
if (!this.heapFileREADY.exists()) throw new IOException("HeapWriter: renaming of " + this.heapFileREADY.toString() + " failed: files still exists");
if (this.heapFileTMP.exists()) throw new IOException("HeapWriter: renaming to " + this.heapFileTMP.toString() + " failed: file does not exist");
// generate index and gap files
if (writeIDX && this.index.size() > 3) {
@ -158,11 +158,11 @@ public final class HeapWriter {
long start = System.currentTimeMillis();
String fingerprint = HeapReader.fingerprintFileHash(this.heapFileREADY);
if (fingerprint == null) {
log.severe("cannot write a dump for " + this.heapFileREADY.getName()+ ": fingerprint is null");
log.severe("HeapWriter: cannot write a dump for " + this.heapFileREADY.getName()+ ": fingerprint is null");
} else {
new Gap().dump(fingerprintGapFile(this.heapFileREADY, fingerprint));
this.index.dump(fingerprintIndexFile(this.heapFileREADY, fingerprint));
log.info("wrote a dump for the " + this.index.size() + " index entries of " + this.heapFileREADY.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
log.info("HeapWriter: wrote a dump for the " + this.index.size() + " index entries of " + this.heapFileREADY.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
}
this.index.close();
this.index = null;

@ -124,7 +124,7 @@ public class MapDataMining extends MapHeap {
try {
map = super.get(mapnameb);
} catch (final SpaceExceededException e) {
ConcurrentLog.warn("MapDataMining", e.getMessage());
ConcurrentLog.warn("KELONDRO", "MapDataMining: " + e.getMessage());
break;
}
if (map == null) break;

@ -486,7 +486,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
try {
map = get(nextKey, false);
} catch (final IOException e) {
ConcurrentLog.warn("MapDataMining", e.getMessage());
ConcurrentLog.warn("KELONDRO", "MapDataMining: " + e.getMessage());
continue;
} catch (final SpaceExceededException e) {
ConcurrentLog.logException(e);

@ -184,7 +184,7 @@ public class URIMetadataNode extends SolrDocument /* implements Comparable<URIMe
final String myhash = ASCII.String(this.url.hash());
if (!hashstr.equals(myhash)) {
this.setField(CollectionSchema.id.getSolrFieldName(), myhash);
ConcurrentLog.fine("URIMetadataNode", "updated document.ID of " + urlRaw + " from " + hashstr + " to " + myhash);
ConcurrentLog.fine("KELONDRO", "URIMetadataNode: updated document.ID of " + urlRaw + " from " + hashstr + " to " + myhash);
// ususally the hosthash matches but just to be on the safe site
final String hostidstr = getString(CollectionSchema.host_id_s); // id or empty string
if (!hostidstr.isEmpty() && !hostidstr.equals(this.url.hosthash())) {
@ -704,14 +704,14 @@ public class URIMetadataNode extends SolrDocument /* implements Comparable<URIMe
public static URIMetadataNode importEntry(final String propStr, String collection) {
if (propStr == null || propStr.isEmpty() || propStr.charAt(0) != '{' || !propStr.endsWith("}")) {
ConcurrentLog.severe("URIMetadataNode", "importEntry: propStr is not proper: " + propStr);
ConcurrentLog.severe("KELONDRO", "URIMetadataNode: importEntry: propStr is not proper: " + propStr);
return null;
}
try {
return new URIMetadataNode(MapTools.s2p(propStr.substring(1, propStr.length() - 1)), collection);
} catch (final kelondroException | MalformedURLException e) {
// wrong format
ConcurrentLog.severe("URIMetadataNode", e.getMessage());
ConcurrentLog.severe("KELONDRO", "URIMetadataNode: " + e.getMessage());
return null;
}
}

@ -57,10 +57,10 @@ public class Word {
static {
try {
hashCache = new ConcurrentARC<String, byte[]>(hashCacheSize, Math.min(32, 2 * Runtime.getRuntime().availableProcessors()));
ConcurrentLog.info("Word", "hashCache.size = " + hashCacheSize);
ConcurrentLog.info("KELONDRO", "Word: hashCache.size = " + hashCacheSize);
} catch (final OutOfMemoryError e) {
hashCache = new ConcurrentARC<String, byte[]>(1000, Math.min(8, 1 + Runtime.getRuntime().availableProcessors()));
ConcurrentLog.info("Word", "hashCache.size = " + 1000);
ConcurrentLog.info("KELONDRO", "Word: hashCache.size = " + 1000);
}
}

@ -593,7 +593,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
p--;
worker[p % cores0].add(this.container.get(p, false));
if (p % 100 == 0 && System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("TransformDistributor", "distribution of WordReference entries to worker queues ended with timeout = " + this.maxtime);
ConcurrentLog.warn("KELONDRO", "TransformDistributor: distribution of WordReference entries to worker queues ended with timeout = " + this.maxtime);
break;
}
}
@ -645,7 +645,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
while ((entry = this.in.take()) != WordReferenceRow.poisonRowEntry) {
this.out.put(new WordReferenceVars(new WordReferenceRow(entry), local));
if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("TransformWorker", "normalization of row entries from row to vars ended with timeout = " + this.maxtime);
ConcurrentLog.warn("KELONDRO", "TransformWorker: normalization of row entries from row to vars ended with timeout = " + this.maxtime);
break;
}
}

@ -311,7 +311,7 @@ public final class Row implements Serializable {
try {
setCol(col.encoder, this.offset + clstrt, col.cellwidth, NumberTools.parseLongDecSubstring(token, p + 1));
} catch (final NumberFormatException e) {
ConcurrentLog.severe("kelondroRow", "NumberFormatException for celltype_cardinal, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
ConcurrentLog.severe("KELONDRO", "kelondroRow: NumberFormatException for celltype_cardinal, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
setCol(col.encoder, this.offset + clstrt, col.cellwidth, 0);
}
} else if ((decimalCardinal) && (col.celltype == Column.celltype_binary)) {
@ -319,7 +319,7 @@ public final class Row implements Serializable {
try {
setCol(clstrt, col.cellwidth, new byte[]{(byte) NumberTools.parseIntDecSubstring(token, p + 1)});
} catch (final NumberFormatException e) {
ConcurrentLog.severe("kelondroRow", "NumberFormatException for celltype_binary, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
ConcurrentLog.severe("KELONDRO", "kelondroRow: NumberFormatException for celltype_binary, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
setCol(clstrt, col.cellwidth, new byte[]{0});
}
} else if ((decimalCardinal) && (col.celltype == Column.celltype_bitfield)) {

@ -106,7 +106,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
this.rowdef = rowdef;
this.chunkcount = (int) exportedCollection.getColLong(exp_chunkcount);
if ((this.chunkcount > chunkcachelength / rowdef.objectsize)) {
ConcurrentLog.warn("RowCollection", "corrected wrong chunkcount; chunkcount = " + this.chunkcount + ", chunkcachelength = " + chunkcachelength + ", rowdef.objectsize = " + rowdef.objectsize);
ConcurrentLog.warn("KELONDRO", "RowCollection: corrected wrong chunkcount; chunkcount = " + this.chunkcount + ", chunkcachelength = " + chunkcachelength + ", rowdef.objectsize = " + rowdef.objectsize);
this.chunkcount = chunkcachelength / rowdef.objectsize; // patch problem
}
this.lastTimeWrote = (exportedCollection.getColLong(exp_last_wrote) + 10957) * day;
@ -122,7 +122,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
throw new kelondroException("old collection order does not match with new order; objectOrder.signature = " + rowdef.objectOrder.signature() + ", oldOrder.signature = " + oldOrder.signature());
this.sortBound = (int) exportedCollection.getColLong(exp_order_bound);
if (this.sortBound > this.chunkcount) {
ConcurrentLog.warn("RowCollection", "corrected wrong sortBound; sortBound = " + this.sortBound + ", chunkcount = " + this.chunkcount);
ConcurrentLog.warn("KELONDRO", "RowCollection: corrected wrong sortBound; sortBound = " + this.sortBound + ", chunkcount = " + this.chunkcount);
this.sortBound = this.chunkcount;
}
this.chunkcache = exportedCollection.getColBytes(exp_collection, false);
@ -860,7 +860,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
i--;
}
} catch (final RuntimeException e) {
ConcurrentLog.warn("kelondroRowCollection", e.getMessage(), e);
ConcurrentLog.warn("KELONDRO", "kelondroRowCollection: " + e.getMessage(), e);
} finally {
if (!u) sort();
}

@ -100,7 +100,7 @@ public class RowSet extends RowCollection implements Index, Iterable<Row.Entry>,
}
//assert b.length - exportOverheadSize == size * rowdef.objectsize : "b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize;
if (b.length - exportOverheadSize != alloc) {
ConcurrentLog.severe("RowSet", "exportOverheadSize wrong: b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize);
ConcurrentLog.severe("KELONDRO", "RowSet: exportOverheadSize wrong: b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize);
return new RowSet(rowdef, 0);
}
System.arraycopy(b, (int) exportOverheadSize, chunkcache, 0, chunkcache.length);
@ -509,13 +509,13 @@ public class RowSet extends RowCollection implements Index, Iterable<Row.Entry>,
try {
c0.sort();
} catch (final Throwable e) {
ConcurrentLog.severe("RowSet", "collection corrupted. cleaned. " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "RowSet: collection corrupted. cleaned. " + e.getMessage(), e);
c0.clear();
}
try {
c1.sort();
} catch (final Throwable e) {
ConcurrentLog.severe("RowSet", "collection corrupted. cleaned. " + e.getMessage(), e);
ConcurrentLog.severe("KELONDRO", "RowSet: collection corrupted. cleaned. " + e.getMessage(), e);
c1.clear();
}
int c0i = 0, c1i = 0;

@ -54,7 +54,7 @@ public abstract class AbstractReference implements Reference {
try {
return d == 0 ? 0 : d / positions().size();
} catch (ArithmeticException ex) {
ConcurrentLog.fine("AbstractReference", "word distance calculation:" + ex.getMessage());
ConcurrentLog.fine("KELONDRO", "AbstractReference: word distance calculation:" + ex.getMessage());
return 0;
}
}

@ -48,7 +48,7 @@ import net.yacy.kelondro.util.MemoryControl;
*/
public class IODispatcher extends Thread {
private static final ConcurrentLog log = new ConcurrentLog("IODispatcher");
private static final ConcurrentLog log = new ConcurrentLog("KELONDRO IODispatcher");
private Semaphore controlQueue; // controls that only one io job is running
private final Semaphore termination; // released if thread is safe to terminate

@ -487,7 +487,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashes));
} catch (final SpaceExceededException e) {
reduced = 0;
ConcurrentLog.warn("IndexCell", "not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
ConcurrentLog.warn("KELONDRO", "IndexCell: not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
}
//assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem();
return removed + (reduced / this.array.rowdef().objectsize);
@ -503,7 +503,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashBytes));
} catch (final SpaceExceededException e) {
reduced = 0;
ConcurrentLog.warn("IndexCell", "not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
ConcurrentLog.warn("KELONDRO", "IndexCell: not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
}
//assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem();
return removed || (reduced > 0);

@ -310,14 +310,14 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
int k = 1;
ReferenceContainer<ReferenceType> c = new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(a, this.factory.getRow()));
if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in get() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timout in get() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c;
}
while (entries.hasNext()) {
c = c.merge(new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(entries.next(), this.factory.getRow())));
k++;
if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in get() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timout in get() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c;
}
}
@ -333,7 +333,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
int c = RowSet.importRowCount(a, this.factory.getRow());
assert c >= 0;
if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in count() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timeout in count() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c;
}
while (entries.hasNext()) {
@ -341,7 +341,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
assert c >= 0;
k++;
if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in count() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timeout in count() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c;
}
}
@ -402,7 +402,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkBestSmallFiles(final IODispatcher merger, final long targetFileSize) {
final File[] ff = this.array.unmountBestMatch(2.0f, targetFileSize);
if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink1", "unmountBestMatch(2.0, " + targetFileSize + ")");
ConcurrentLog.info("KELONDRO", "RICELL-shrink1: unmountBestMatch(2.0, " + targetFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true;
}
@ -410,7 +410,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkAnySmallFiles(final IODispatcher merger, final long targetFileSize) {
final File[] ff = this.array.unmountSmallest(targetFileSize);
if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink2", "unmountSmallest(" + targetFileSize + ")");
ConcurrentLog.info("KELONDRO", "RICELL-shrink2: unmountSmallest(" + targetFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true;
}
@ -418,7 +418,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkUpToMaxSizeFiles(final IODispatcher merger, final long maxFileSize) {
final File[] ff = this.array.unmountBestMatch(2.0f, maxFileSize);
if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink3", "unmountBestMatch(2.0, " + maxFileSize + ")");
ConcurrentLog.info("KELONDRO", "RICELL-shrink3: unmountBestMatch(2.0, " + maxFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true;
}
@ -426,7 +426,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkOldFiles(final IODispatcher merger) {
final File ff = this.array.unmountOldest();
if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink4/rewrite", "unmountOldest()");
ConcurrentLog.info("KELONDRO", "RICELL-shrink4/rewrite: unmountOldest()");
merger.merge(ff, null, this.factory, this.array, newContainerBLOBFile());
return true;
}

@ -60,7 +60,7 @@ import net.yacy.kelondro.util.FileUtils;
*/
public final class ReferenceContainerCache<ReferenceType extends Reference> extends AbstractIndex<ReferenceType> implements Index<ReferenceType>, IndexReader<ReferenceType>, Iterable<ReferenceContainer<ReferenceType>> {
private static final ConcurrentLog log = new ConcurrentLog("ReferenceContainerCache");
private static final ConcurrentLog log = new ConcurrentLog("KELONDRO");
private final int termSize;
private final ByteOrder termOrder;

@ -67,15 +67,15 @@ public class ReferenceIterator <ReferenceType extends Reference> extends LookAhe
try {
row = RowSet.importRowSet(entry.getValue(), this.factory.getRow());
if (row == null) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because importRowSet returned null");
ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because importRowSet returned null");
continue; // thats a fail but not as REALLY bad if the whole method would crash here
}
return new ReferenceContainer<ReferenceType>(this.factory, entry.getKey(), row);
} catch (final SpaceExceededException e) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because of too low memory: " + e.toString());
ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because of too low memory: " + e.toString());
continue;
} catch (final Throwable e) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because of error: " + e.toString());
ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because of error: " + e.toString());
continue;
}
}

@ -181,7 +181,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(element.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime();
} catch (final ParseException e) {
ConcurrentLog.severe("SplitTable", "", e);
ConcurrentLog.severe("KELONDRO", "SplitTable: ", e);
continue;
}
time = d.getTime();
@ -218,7 +218,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
// open next biggest table
t.remove(maxf);
f = new File(this.path, maxf);
ConcurrentLog.info("kelondroSplitTable", "opening partial eco table " + f);
ConcurrentLog.info("KELONDRO", "SplitTable: opening partial eco table " + f);
Table table;
try {
table = new Table(f, this.rowdef, EcoFSBufferSize, 0, this.useTailCache, this.exceed134217727, false);
@ -226,7 +226,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try {
table = new Table(f, this.rowdef, 0, 0, false, this.exceed134217727, false);
} catch (final SpaceExceededException ee) {
ConcurrentLog.severe("SplitTable", "Table " + f.toString() + " cannot be initialized: " + ee.getMessage(), ee);
ConcurrentLog.severe("KELONDRO", "SplitTable: Table " + f.toString() + " cannot be initialized: " + ee.getMessage(), ee);
continue maxfind;
}
}
@ -373,7 +373,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(name.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime().getTime();
} catch (final ParseException e) {
ConcurrentLog.severe("SplitTable", "", e);
ConcurrentLog.severe("KELONDRO", "SplitTable", e);
d = 0;
}
if (d + this.fileAgeLimit < t || new File(this.path, name).length() >= this.fileSizeLimit) {

@ -71,7 +71,7 @@ import net.yacy.kelondro.util.kelondroException;
public class Table implements Index, Iterable<Row.Entry> {
// static tracker objects
private final static ConcurrentLog log = new ConcurrentLog("TABLE");
private final static ConcurrentLog log = new ConcurrentLog("KELONDRO TABLE");
/** Map all active table instances by file name */
private final static Map<String, Table> tableTracker = new ConcurrentSkipListMap<String, Table>();

@ -20,16 +20,24 @@ else
ID="id"
fi
# if java executable not found using 'which', let's try 'command'
if [ ! -x "$JAVA" ]
then
JAVA="`command -v java`"
fi
if [ ! -x "$JAVA" ]
then
echo "The java command is not executable."
echo "Either you have not installed java or it is not in your PATH"
echo "Either you have not installed java or it is not in your PATH."
echo "You can also set a path to java manually, in \$JAVA option of $0 script."
#Cron supports setting the path in
#echo "Has this script been invoked by CRON?"
#echo "if so, please set PATH in the crontab, or set the correct path in the variable in this script."
exit 1
fi
usage() {
cat - <<USAGE
startscript for YaCy on UNIX-like systems
