Merge branch 'yacy:master' into master

pull/678/head
smokingwheels authored 3 months ago, committed by GitHub
commit 4967b2e0e8

@@ -30,9 +30,7 @@ HTTPC.level = INFO
 DHT-OUT.level = INFO
 SWITCHBOARD.level = INFO
 DHT.level = INFO
-HeapReader.level = INFO
-Heap.level = INFO
+KELONDRO.level = INFO
 # UPnP related
 UPNP.level = INFO
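Every hunk below follows the same pattern as this logging-config change: the per-class ConcurrentLog appender names used by the kelondro code (HeapReader, Heap, ArrayStack, BLOBArray, Compressor, ...) are consolidated into the single KELONDRO appender configured above, and the former class name is moved into the log message itself. A minimal sketch of the call pattern, assuming the usual net.yacy.cora.util.ConcurrentLog import path (the file variable is illustrative, taken from the ArrayStack hunk further down):

    import net.yacy.cora.util.ConcurrentLog;
    import java.io.File;

    class KelondroLoggingSketch {
        void report(final File f) {
            // before: one appender name per class
            // ConcurrentLog.warn("ArrayStack", "cannot read file " + f.getName());
            // after: shared "KELONDRO" appender, class name prefixed to the message
            ConcurrentLog.warn("KELONDRO", "ArrayStack: cannot read file " + f.getName());
        }
    }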

@@ -212,7 +212,7 @@ public class ArrayStack implements BLOB {
 sortedItems.put(Long.valueOf(time), new blobItem(d, f, oneBlob));
 } catch (final IOException e) {
 if (deleteonfail) {
-ConcurrentLog.warn("ArrayStack", "cannot read file " + f.getName() + ", deleting it (smart fail; alternative would be: crash; required user action would be same as deletion)");
+ConcurrentLog.warn("KELONDRO", "ArrayStack: cannot read file " + f.getName() + ", deleting it (smart fail; alternative would be: crash; required user action would be same as deletion)");
 f.delete();
 } else {
 throw new IOException(e.getMessage(), e);
@@ -279,7 +279,7 @@ public class ArrayStack implements BLOB {
 return;
 }
 }
-ConcurrentLog.severe("BLOBArray", "file " + location + " cannot be unmounted. The file " + ((location.exists()) ? "exists." : "does not exist."));
+ConcurrentLog.severe("KELONDRO", "BLOBArray: file " + location + " cannot be unmounted. The file " + ((location.exists()) ? "exists." : "does not exist."));
 }
 private File unmount(final int idx) {
@@ -624,7 +624,7 @@ public class ArrayStack implements BLOB {
 } catch (final InterruptedException e) {
 Thread.currentThread().interrupt();
 } catch (final ExecutionException e) {
-ConcurrentLog.severe("ArrayStack", "", e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack", e);
 throw new RuntimeException(e.getCause());
 }
 //System.out.println("*DEBUG SplitTable fail.time = " + (System.currentTimeMillis() - start) + " ms");
@@ -701,10 +701,10 @@ public class ArrayStack implements BLOB {
 final byte[] n = b.get(this.key);
 if (n != null) return n;
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "BlobValues - IOException: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: BlobValues - IOException: " + e.getMessage(), e);
 return null;
 } catch (final SpaceExceededException e) {
-ConcurrentLog.severe("ArrayStack", "BlobValues - RowSpaceExceededException: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: BlobValues - RowSpaceExceededException: " + e.getMessage(), e);
 break;
 }
 }
@@ -758,7 +758,7 @@ public class ArrayStack implements BLOB {
 final long l = b.length(this.key);
 if (l >= 0) return Long.valueOf(l);
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "", e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack", e);
 return null;
 }
 }
@@ -891,10 +891,10 @@ public class ArrayStack implements BLOB {
 try {
 final boolean terminated = DELETE_EXECUTOR.awaitTermination(timeout, TimeUnit.SECONDS);
 if(!terminated) {
-ConcurrentLog.warn("ArrayStack", "Delete executor service could not terminated within " + timeout + " second");
+ConcurrentLog.warn("KELONDRO", "ArrayStack: Delete executor service could not terminated within " + timeout + " second");
 }
 } catch (InterruptedException e) {
-ConcurrentLog.warn("ArrayStack", "Interrupted before termination of the delete executor service");
+ConcurrentLog.warn("KELONDRO", "ArrayStack: Interrupted before termination of the delete executor service");
 }
 }
@@ -925,34 +925,34 @@ public class ArrayStack implements BLOB {
 final File newFile, final int writeBuffer) {
 if (f2 == null) {
 // this is a rewrite
-ConcurrentLog.info("BLOBArray", "rewrite of " + f1.getName());
+ConcurrentLog.info("KELONDRO", "BLOBArray: rewrite of " + f1.getName());
 final File resultFile = rewriteWorker(factory, this.keylength, this.ordering, f1, newFile, writeBuffer);
 if (resultFile == null) {
-ConcurrentLog.warn("BLOBArray", "rewrite of file " + f1 + " returned null. newFile = " + newFile);
+ConcurrentLog.warn("KELONDRO", "BLOBArray: rewrite of file " + f1 + " returned null. newFile = " + newFile);
 return null;
 }
 try {
 mountBLOB(resultFile, false);
 } catch (final IOException e) {
-ConcurrentLog.warn("BLOBArray", "rewrite of file " + f1 + " successfull, but read failed. resultFile = " + resultFile);
+ConcurrentLog.warn("KELONDRO", "BLOBArray: rewrite of file " + f1 + " successfull, but read failed. resultFile = " + resultFile);
 return null;
 }
-ConcurrentLog.info("BLOBArray", "rewrite of " + f1.getName() + " into " + resultFile);
+ConcurrentLog.info("KELONDRO", "BLOBArray: rewrite of " + f1.getName() + " into " + resultFile);
 return resultFile;
 }
-ConcurrentLog.info("BLOBArray", "merging " + f1.getName() + " with " + f2.getName());
+ConcurrentLog.info("KELONDRO", "BLOBArray: merging " + f1.getName() + " with " + f2.getName());
 final File resultFile = mergeWorker(factory, this.keylength, this.ordering, f1, f2, newFile, writeBuffer);
 if (resultFile == null) {
-ConcurrentLog.warn("BLOBArray", "merge of files " + f1 + ", " + f2 + " returned null. newFile = " + newFile);
+ConcurrentLog.warn("KELONDRO", "BLOBArray: merge of files " + f1 + ", " + f2 + " returned null. newFile = " + newFile);
 return null;
 }
 try {
 mountBLOB(resultFile, false);
 } catch (final IOException e) {
-ConcurrentLog.warn("BLOBArray", "merge of files " + f1 + ", " + f2 + " successfull, but read failed. resultFile = " + resultFile);
+ConcurrentLog.warn("KELONDRO", "BLOBArray: merge of files " + f1 + ", " + f2 + " successfull, but read failed. resultFile = " + resultFile);
 return null;
 }
-ConcurrentLog.info("BLOBArray", "merged " + f1.getName() + " with " + f2.getName() + " into " + resultFile);
+ConcurrentLog.info("KELONDRO", "BLOBArray: merged " + f1.getName() + " with " + f2.getName() + " into " + resultFile);
 return resultFile;
 }
@@ -988,12 +988,12 @@ public class ArrayStack implements BLOB {
 merge(i1, i2, order, writer);
 writer.close(true);
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "cannot writing or close writing merge, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot writing or close writing merge, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
 HeapWriter.delete(tmpFile);
 HeapWriter.delete(newFile);
 return null;
 } catch (final SpaceExceededException e) {
-ConcurrentLog.severe("ArrayStack", "cannot merge because of memory failure: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because of memory failure: " + e.getMessage(), e);
 HeapWriter.delete(tmpFile);
 HeapWriter.delete(newFile);
 return null;
@@ -1003,13 +1003,13 @@ public class ArrayStack implements BLOB {
 HeapWriter.delete(f2);
 return newFile;
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "cannot merge because input files cannot be read, f2 = " + f2.toString() + ": " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because input files cannot be read, f2 = " + f2.toString() + ": " + e.getMessage(), e);
 return null;
 } finally {
 if (i2 != null) i2.close();
 }
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "cannot merge because input files cannot be read, f1 = " + f1.toString() + ": " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot merge because input files cannot be read, f1 = " + f1.toString() + ": " + e.getMessage(), e);
 return null;
 } finally {
 if (i1 != null) i1.close();
@@ -1025,7 +1025,7 @@ public class ArrayStack implements BLOB {
 try {
 i = new ReferenceIterator<ReferenceType>(f, factory);
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "cannot rewrite because input file cannot be read, f = " + f.toString() + ": " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot rewrite because input file cannot be read, f = " + f.toString() + ": " + e.getMessage(), e);
 return null;
 }
 if (!i.hasNext()) {
@@ -1040,12 +1040,12 @@ public class ArrayStack implements BLOB {
 writer.close(true);
 i.close();
 } catch (final IOException e) {
-ConcurrentLog.severe("ArrayStack", "cannot writing or close writing rewrite, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot writing or close writing rewrite, newFile = " + newFile.toString() + ", tmpFile = " + tmpFile.toString() + ": " + e.getMessage(), e);
 FileUtils.deletedelete(tmpFile);
 FileUtils.deletedelete(newFile);
 return null;
 } catch (final SpaceExceededException e) {
-ConcurrentLog.severe("ArrayStack", "cannot rewrite because of memory failure: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "ArrayStack: cannot rewrite because of memory failure: " + e.getMessage(), e);
 FileUtils.deletedelete(tmpFile);
 FileUtils.deletedelete(newFile);
 return null;
@@ -1072,7 +1072,7 @@ public class ArrayStack implements BLOB {
 e = ordering.compare(c1.getTermHash(), c2.getTermHash());
 if (e < 0) {
 s = c1.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
 writer.add(c1.getTermHash(), c1.exportCollection());
 if (i1.hasNext()) {
 c1lh = c1.getTermHash();
@@ -1085,7 +1085,7 @@ public class ArrayStack implements BLOB {
 }
 if (e > 0) {
 s = c2.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
 writer.add(c2.getTermHash(), c2.exportCollection());
 if (i2.hasNext()) {
 c2lh = c2.getTermHash();
@@ -1100,7 +1100,7 @@ public class ArrayStack implements BLOB {
 // merge the entries
 c1 = c1.merge(c2);
 s = c1.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
 writer.add(c1.getTermHash(), c1.exportCollection());
 c1lh = c1.getTermHash();
 c2lh = c2.getTermHash();
@@ -1130,7 +1130,7 @@ public class ArrayStack implements BLOB {
 while (c1 != null) {
 //System.out.println("FLUSH REMAINING 1: " + c1.getWordHash());
 s = c1.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c1.getTermHash()) + " by " + s + " to " + c1.size() + " entries");
 writer.add(c1.getTermHash(), c1.exportCollection());
 if (i1.hasNext()) {
 c1lh = c1.getTermHash();
@@ -1143,7 +1143,7 @@ public class ArrayStack implements BLOB {
 while (c2 != null) {
 //System.out.println("FLUSH REMAINING 2: " + c2.getWordHash());
 s = c2.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c2.getTermHash()) + " by " + s + " to " + c2.size() + " entries");
 writer.add(c2.getTermHash(), c2.exportCollection());
 if (i2.hasNext()) {
 c2lh = c2.getTermHash();
@@ -1167,7 +1167,7 @@ public class ArrayStack implements BLOB {
 while (true) {
 assert c != null;
 s = c.shrinkReferences();
-if (s > 0) ConcurrentLog.info("ArrayStack", "shrinking index for " + ASCII.String(c.getTermHash()) + " by " + s + " to " + c.size() + " entries");
+if (s > 0) ConcurrentLog.info("KELONDRO", "ArrayStack: shrinking index for " + ASCII.String(c.getTermHash()) + " by " + s + " to " + c.size() + " entries");
 writer.add(c.getTermHash(), c.exportCollection());
 if (i.hasNext()) {
 clh = c.getTermHash();

@@ -99,7 +99,7 @@ public class BEncodedHeap implements MapStore {
 try {
 return this.table.keys(true, false);
 } catch (final IOException e) {
-ConcurrentLog.severe("BEncodedHeap", "returning empty iterator for failed key iteration: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "BEncodedHeap: returning empty iterator for failed key iteration: " + e.getMessage(), e);
 return new CloneableIterator<byte[]>(){
 @Override
@@ -620,7 +620,7 @@
 this.table = new Heap(location, keylen, order, buffermax);
 return iter;
 } catch (final IOException e ) {
-ConcurrentLog.severe("PropertiesTable", e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "PropertiesTable " + e.getMessage(), e);
 return null;
 }
 }

@@ -97,7 +97,7 @@ public class BEncodedHeapBag extends AbstractMapStore implements MapStore {
 try {
 d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(element.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime();
 } catch (final ParseException e) {
-ConcurrentLog.severe("BEncodedHeapBag", "", e);
+ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag", e);
 continue;
 }
 time = d.getTime();
@@ -134,11 +134,11 @@
 t.remove(maxf);
 f = new File(this.baseDir, maxf);
 try {
-ConcurrentLog.info("BEncodedHeapBag", "opening partial heap " + f);
+ConcurrentLog.info("KELONDRO", "BEncodedHeapBag: opening partial heap " + f);
 BEncodedHeap heap = new BEncodedHeap(f, this.keylength, this.entryOrder, this.buffermax);
 this.bag.put(maxf, heap);
 } catch (final IOException e) {
-ConcurrentLog.severe("BEncodedHeapBag", "error opening partial heap " + f);
+ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag: error opening partial heap " + f);
 }
 }
 }
@@ -187,7 +187,7 @@
 try {
 heap = new BEncodedHeap(f, this.keylength, this.entryOrder, this.buffermax);
 } catch (final IOException e) {
-ConcurrentLog.severe("BEncodedHeapBag", "unable to open new heap file: " + e.getMessage(), e);
+ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag: unable to open new heap file: " + e.getMessage(), e);
 return null;
 }
 this.bag.put(this.current, heap);
@@ -205,7 +205,7 @@
 try {
 d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(name.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime().getTime();
 } catch (final ParseException e) {
-ConcurrentLog.severe("BEncodedHeapBag", "", e);
+ConcurrentLog.severe("KELONDRO", "BEncodedHeapBag", e);
 d = 0;
 }
 if (d + this.fileAgeLimit < t || new File(this.baseDir, name).length() >= this.fileSizeLimit) {

@@ -146,7 +146,7 @@ public class BEncodedHeapShard extends AbstractMapStore implements MapStore {
 // open all tables of this shard
 for (final String element : tablefile) {
 if (this.shardMethod.isShardPart(element)) {
-ConcurrentLog.info("BEncodedHeapShard", "opening partial shard " + element);
+ConcurrentLog.info("KELONDRO", "BEncodedHeapShard: opening partial shard " + element);
 MapStore bag = openBag(element);
 this.shard.put(this.shardMethod.getShardName(element), bag);
 }

@@ -158,7 +158,7 @@ public class Compressor implements BLOB, Iterable<byte[]> {
 baos.close();
 return baos.toByteArray();
 } catch (final IOException e) {
-ConcurrentLog.severe("Compressor", "", e);
+ConcurrentLog.severe("KELONDRO", "Compressor", e);
 return null;
 }
 }
@@ -219,7 +219,7 @@
 locked = this.lock.tryLock(this.lockTimeout, TimeUnit.MILLISECONDS);
 } catch (InterruptedException ignored) {
 /* When interrupted, simply return null */
-ConcurrentLog.fine("Compressor", "Interrupted while acquiring a synchronzation lock on get()");
+ConcurrentLog.fine("KELONDRO", "Compressor: Interrupted while acquiring a synchronzation lock on get()");
 }
 if(locked) {
 try {
@@ -241,7 +241,7 @@
 }
 return decompress(b);
 }
-ConcurrentLog.fine("Compressor", "Could not acquire a synchronization lock for retrieval within " + this.lockTimeout + " milliseconds");
+ConcurrentLog.fine("KELONDRO", "Compressor: Could not acquire a synchronization lock for retrieval within " + this.lockTimeout + " milliseconds");
 return b;
 }
@@ -318,7 +318,7 @@
 locked = this.lock.tryLock(this.lockTimeout, TimeUnit.MILLISECONDS);
 } catch (InterruptedException ignored) {
 /* When interrupted, simply nothing is inserted */
-ConcurrentLog.fine("Compressor", "Interrupted while acquiring a synchronzation lock on insert()");
+ConcurrentLog.fine("KELONDRO", "Compressor: Interrupted while acquiring a synchronzation lock on insert()");
 }
 if(locked) {
 try {
@@ -349,7 +349,7 @@
 flushAll();
 }
 } else {
-ConcurrentLog.fine("Compressor", "Could not acquire a synchronization lock for insertion within " + this.lockTimeout + " milliseconds");
+ConcurrentLog.fine("KELONDRO", "Compressor: Could not acquire a synchronization lock for insertion within " + this.lockTimeout + " milliseconds");
 }
 }

@@ -89,7 +89,7 @@ public final class Heap extends HeapModifier implements BLOB {
 this.buffermax = buffermax;
 this.buffer = new TreeMap<byte[], byte[]>(ordering);
 this.buffersize = 0;
-ConcurrentLog.info("Heap", "initializing heap " + this.name());
+ConcurrentLog.info("KELONDRO", "Heap: initializing heap " + this.name());
 /*
 // DEBUG
 Iterator<byte[]> i = index.keys(true, null);
@@ -279,7 +279,7 @@
 */
 @Override
 public synchronized void clear() throws IOException {
-ConcurrentLog.info("Heap", "clearing heap " + this.name());
+ConcurrentLog.info("KELONDRO", "Heap: clearing heap " + this.name());
 assert this.buffer != null;
 if (this.buffer == null) this.buffer = new TreeMap<byte[], byte[]>(this.ordering);
 this.buffer.clear();
@@ -292,7 +292,7 @@
 */
 @Override
 public synchronized void close(final boolean writeIDX) {
-ConcurrentLog.info("Heap", "closing heap " + this.name());
+ConcurrentLog.info("KELONDRO", "Heap: closing heap " + this.name());
 if (this.file != null && this.buffer != null) {
 try {
 flushBuffer();

@@ -115,8 +115,8 @@ public class HeapModifier extends HeapReader implements BLOB {
 //assert seek + size + 4 <= this.file.length() : heapFile.getName() + ": too long size " + size + " in record at " + seek;
 long filelength = this.file.length(); // put in separate variable for debugging
 if (seek + size + 4 > filelength) {
-ConcurrentLog.severe("BLOBHeap", this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
-throw new IOException(this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
+ConcurrentLog.severe("KELONDRO", "BLOBHeap: " + this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
+throw new IOException("BLOBHeap: " + this.heapFile.getName() + ": too long size " + size + " in record at " + seek);
 }
 super.deleteFingerprint();

@@ -58,7 +58,7 @@ public class HeapReader {
 //public final static long keepFreeMem = 20 * 1024 * 1024;
-private final static ConcurrentLog log = new ConcurrentLog("HeapReader");
+private final static ConcurrentLog log = new ConcurrentLog("KELONDRO");
 // input values
 protected int keylength; // the length of the primary key
@@ -107,10 +107,10 @@
 }
 }
 if (!ok) {
-log.warn("verification of idx file for " + heapFile.toString() + " failed, re-building index");
+log.warn("HeapReader: verification of idx file for " + heapFile.toString() + " failed, re-building index");
 initIndexReadFromHeap();
 } else {
-log.info("using a dump of the index of " + heapFile.toString() + ".");
+log.info("HeapReader: using a dump of the index of " + heapFile.toString() + ".");
 }
 } else {
 // if we did not have a dump, create a new index
@@ -161,7 +161,7 @@
 // if this is successful, return true; otherwise false
 String fingerprint = fingerprintFileHash(this.heapFile);
 if (fingerprint == null) {
-log.severe("cannot generate a fingerprint for " + this.heapFile + ": null");
+log.severe("HeapReader: cannot generate a fingerprint for " + this.heapFile + ": null");
 return false;
 }
 this.fingerprintFileIdx = HeapWriter.fingerprintIndexFile(this.heapFile, fingerprint);
@@ -188,7 +188,7 @@
 // check saturation
 if (this.index instanceof RowHandleMap) {
 int[] saturation = ((RowHandleMap) this.index).saturation(); // {<the maximum length of consecutive equal-beginning bytes in the key>, <the minimum number of leading zeros in the second column>}
-log.info("saturation of " + this.fingerprintFileIdx.getName() + ": keylength = " + saturation[0] + ", vallength = " + saturation[1] + ", size = " + this.index.size() +
+log.info("HeapReader: saturation of " + this.fingerprintFileIdx.getName() + ": keylength = " + saturation[0] + ", vallength = " + saturation[1] + ", size = " + this.index.size() +
 ", maximum saving for index-compression = " + (saturation[0] * this.index.size() / 1024 / 1024) + " MB" +
 ", exact saving for value-compression = " + (saturation[1] * this.index.size() / 1024 / 1024) + " MB");
 }
@@ -249,7 +249,7 @@
 private void initIndexReadFromHeap() throws IOException {
 // this initializes the this.index object by reading positions from the heap file
-log.info("generating index for " + this.heapFile.toString() + ", " + (this.file.length() / 1024 / 1024) + " MB. Please wait.");
+log.info("HeapReader: generating index for " + this.heapFile.toString() + ", " + (this.file.length() / 1024 / 1024) + " MB. Please wait.");
 this.free = new Gap();
 RowHandleMap.initDataConsumer indexready = RowHandleMap.asynchronusInitializer(this.name() + ".initializer", this.keylength, this.ordering, 8, Math.max(10, (int) (Runtime.getRuntime().freeMemory() / (10 * 1024 * 1024))));
@@ -268,7 +268,7 @@
 //assert reclen > 0 : " reclen == 0 at seek pos " + seek;
 if (reclen == 0) {
 // very bad file inconsistency
-log.severe("reclen == 0 at seek pos " + seek + " in file " + this.heapFile);
+log.severe("HeapReader: reclen == 0 at seek pos " + seek + " in file " + this.heapFile);
 this.file.setLength(seek); // delete everything else at the remaining of the file :-(
 break loop;
 }
@@ -295,7 +295,7 @@
 this.file.seek(seek + 4);
 Arrays.fill(key, (byte) 0);
 this.file.write(key); // mark the place as empty record
-log.warn("BLOB " + this.heapFile.getName() + ": skiped not wellformed key " + UTF8.String(key) + " at seek pos " + seek);
+log.warn("HeapReader: BLOB " + this.heapFile.getName() + ": skiped not wellformed key " + UTF8.String(key) + " at seek pos " + seek);
 }
 }
 // new seek position
@@ -312,7 +312,7 @@
 } catch (final ExecutionException e) {
 ConcurrentLog.logException(e);
 }
-log.info("finished index generation for " + this.heapFile.toString() + ", " + this.index.size() + " entries, " + this.free.size() + " gaps.");
+log.info("HeapReader: finished index generation for " + this.heapFile.toString() + ", " + this.index.size() + " entries, " + this.free.size() + " gaps.");
 }
 private void mergeFreeEntries() throws IOException {
@@ -340,7 +340,7 @@
 lastFree = nextFree;
 }
 }
-log.info("BLOB " + this.heapFile.toString() + ": merged " + merged + " free records");
+log.info("HeapReader: BLOB " + this.heapFile.toString() + ": merged " + merged + " free records");
 if (merged > 0) deleteFingerprint();
 }
 }
@@ -360,7 +360,7 @@
 public int size() {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in size(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in size(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return 0;
 }
 return (this.index == null) ? 0 : this.index.size();
@@ -369,7 +369,7 @@
 public boolean isEmpty() {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in isEmpty(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in isEmpty(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return true;
 }
 return this.index.isEmpty();
@@ -383,7 +383,7 @@
 public boolean containsKey(byte[] key) {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in containsKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in containsKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return false;
 }
 key = normalizeKey(key);
@@ -408,7 +408,7 @@
 protected synchronized byte[] firstKey() throws IOException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in firstKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in firstKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 synchronized (this.index) {
@@ -426,7 +426,7 @@
 protected byte[] first() throws IOException, SpaceExceededException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in first(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in first(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 synchronized (this.index) {
@@ -446,7 +446,7 @@
 protected byte[] lastKey() throws IOException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in lastKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in lastKey(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 if (this.index == null) return null;
@@ -465,7 +465,7 @@
 protected byte[] last() throws IOException, SpaceExceededException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in last(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in last(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 synchronized (this.index) {
@@ -484,7 +484,7 @@
 public byte[] get(byte[] key) throws IOException, SpaceExceededException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in get(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in get(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 key = normalizeKey(key);
@@ -499,7 +499,7 @@
 final int len = this.file.readInt() - this.keylength;
 if (len < 0) {
 // database file may be corrupted and should be deleted :-((
-log.severe("file " + this.file.file() + " corrupted at " + pos + ": negative len. len = " + len + ", pk.len = " + this.keylength);
+log.severe("HeapReader: file " + this.file.file() + " corrupted at " + pos + ": negative len. len = " + len + ", pk.len = " + this.keylength);
 // to get lazy over that problem (who wants to tell the user to stop operation and delete the file???) we work on like the entry does not exist
 this.index.remove(key);
 return null;
@@ -519,7 +519,7 @@
 this.file.readFully(keyf, 0, keyf.length);
 if (!this.ordering.equal(key, keyf)) {
 // verification of the indexed access failed. we must re-read the index
-log.severe("indexed verification access failed for " + this.heapFile.toString());
+log.severe("HeapReader: indexed verification access failed for " + this.heapFile.toString());
 // this is a severe operation, it should never happen.
 // remove entry from index because keeping that element in the index would not make sense
 this.index.remove(key);
@@ -581,7 +581,7 @@
 public long length(byte[] key) throws IOException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return 0;
 }
 key = normalizeKey(key);
@@ -617,17 +617,17 @@
 try {
 String fingerprint = fingerprintFileHash(this.heapFile);
 if (fingerprint == null) {
-log.severe("cannot write a dump for " + this.heapFile.getName()+ ": fingerprint is null");
+log.severe("HeapReader: cannot write a dump for " + this.heapFile.getName()+ ": fingerprint is null");
 } else {
 File newFingerprintFileGap = HeapWriter.fingerprintGapFile(this.heapFile, fingerprint);
 if (this.fingerprintFileGap != null &&
 this.fingerprintFileGap.getName().equals(newFingerprintFileGap.getName()) &&
 this.fingerprintFileGap.exists()) {
-log.info("using existing gap dump instead of writing a new one: " + this.fingerprintFileGap.getName());
+log.info("HeapReader: using existing gap dump instead of writing a new one: " + this.fingerprintFileGap.getName());
 } else {
 long start = System.currentTimeMillis();
 this.free.dump(newFingerprintFileGap);
-log.info("wrote a dump for the " + this.free.size() + " gap entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
+log.info("HeapReader: wrote a dump for the " + this.free.size() + " gap entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
 }
 }
 this.free.clear();
@@ -637,11 +637,11 @@
 if (this.fingerprintFileIdx != null &&
 this.fingerprintFileIdx.getName().equals(newFingerprintFileIdx.getName()) &&
 this.fingerprintFileIdx.exists()) {
-log.info("using existing idx dump instead of writing a new one: " + this.fingerprintFileIdx.getName());
+log.info("HeapReader: using existing idx dump instead of writing a new one: " + this.fingerprintFileIdx.getName());
 } else {
 long start = System.currentTimeMillis();
 this.index.dump(newFingerprintFileIdx);
-log.info("wrote a dump for the " + this.index.size() + " index entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
+log.info("HeapReader: wrote a dump for the " + this.index.size() + " index entries of " + this.heapFile.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
 }
 }
 this.index.close();
@@ -656,7 +656,9 @@
 this.index = null;
 this.closeDate = new Date();
 } catch (Throwable e) {ConcurrentLog.logException(e);}
-log.info("close HeapFile " + this.heapFile.getName() + "; trace: " + ConcurrentLog.stackTrace());
+log.info("HeapReader: close HeapFile " + this.heapFile.getName());
+log.fine("trace: " + ConcurrentLog.stackTrace());
 }
 }
@@ -681,7 +683,7 @@
 */
 public CloneableIterator<byte[]> keys(final boolean up, final boolean rotating) throws IOException {
 if (this.index == null) {
-log.severe("this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 synchronized (this.index) {
@@ -699,7 +701,7 @@
 public CloneableIterator<byte[]> keys(final boolean up, final byte[] firstKey) throws IOException {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in keys(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return null;
 }
 synchronized (this.index) {
@@ -710,7 +712,7 @@
 public long length() {
 assert (this.index != null) : "index == null; closeDate=" + this.closeDate + ", now=" + new Date();
 if (this.index == null) {
-log.severe("this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
+log.severe("HeapReader: this.index == null in length(); closeDate=" + this.closeDate + ", now=" + new Date() + this.heapFile == null ? "" : (" file = " + this.heapFile.toString()));
 return 0;
 }
 synchronized (this.index) {
@@ -780,7 +782,7 @@
 // read some more bytes to consume the empty record
 if (len > 1) {
 if (len - 1 != this.is.skipBytes(len - 1)) { // all that is remaining
-log.warn("problem skiping " + + len + " bytes in " + this.blobFile.getName());
+log.warn("HeapReader: problem skiping " + + len + " bytes in " + this.blobFile.getName());
 try {this.is.close();} catch (final IOException e) {}
 return null;
 }
@@ -811,7 +813,7 @@
 // the allocation of memory for the payload may fail
 // this is bad because we must interrupt the iteration here but the
 // process that uses the iteration may think that the iteraton has just been completed
-log.severe("out of memory in LookAheadIterator.next0 for file " + this.blobFile.toString(), ee);
+log.severe("HeapReader: out of memory in LookAheadIterator.next0 for file " + this.blobFile.toString(), ee);
 try {this.is.close();} catch (final IOException e) {}
 return null;
 }

@@ -41,7 +41,7 @@ import net.yacy.kelondro.util.FileUtils;
 public final class HeapWriter {
-private final static ConcurrentLog log = new ConcurrentLog("HeapWriter");
+private final static ConcurrentLog log = new ConcurrentLog("KELONDRO");
 public final static byte[] ZERO = new byte[]{0};
 private final int keylength; // the length of the primary key
@@ -95,7 +95,7 @@
 try {
 fileStream.close();
 } catch(IOException ignored) {
-log.warn("Could not close output stream on file " + temporaryHeapFile);
+log.warn("HeapWriter: Could not close output stream on file " + temporaryHeapFile);
 }
 throw e;
 }
@@ -147,9 +147,9 @@
 // rename the file into final name
 if (this.heapFileREADY.exists()) FileUtils.deletedelete(this.heapFileREADY);
 boolean renameok = this.heapFileTMP.renameTo(this.heapFileREADY);
-if (!renameok) throw new IOException("cannot rename " + this.heapFileTMP + " to " + this.heapFileREADY);
-if (!this.heapFileREADY.exists()) throw new IOException("renaming of " + this.heapFileREADY.toString() + " failed: files still exists");
-if (this.heapFileTMP.exists()) throw new IOException("renaming to " + this.heapFileTMP.toString() + " failed: file does not exist");
+if (!renameok) throw new IOException("HeapWriter: cannot rename " + this.heapFileTMP + " to " + this.heapFileREADY);
+if (!this.heapFileREADY.exists()) throw new IOException("HeapWriter: renaming of " + this.heapFileREADY.toString() + " failed: files still exists");
+if (this.heapFileTMP.exists()) throw new IOException("HeapWriter: renaming to " + this.heapFileTMP.toString() + " failed: file does not exist");
 // generate index and gap files
 if (writeIDX && this.index.size() > 3) {
@@ -158,11 +158,11 @@
 long start = System.currentTimeMillis();
 String fingerprint = HeapReader.fingerprintFileHash(this.heapFileREADY);
 if (fingerprint == null) {
-log.severe("cannot write a dump for " + this.heapFileREADY.getName()+ ": fingerprint is null");
+log.severe("HeapWriter: cannot write a dump for " + this.heapFileREADY.getName()+ ": fingerprint is null");
 } else {
 new Gap().dump(fingerprintGapFile(this.heapFileREADY, fingerprint));
 this.index.dump(fingerprintIndexFile(this.heapFileREADY, fingerprint));
-log.info("wrote a dump for the " + this.index.size() + " index entries of " + this.heapFileREADY.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
+log.info("HeapWriter: wrote a dump for the " + this.index.size() + " index entries of " + this.heapFileREADY.getName()+ " in " + (System.currentTimeMillis() - start) + " milliseconds.");
 }
 this.index.close();
 this.index = null;

@@ -124,7 +124,7 @@ public class MapDataMining extends MapHeap {
 try {
 map = super.get(mapnameb);
 } catch (final SpaceExceededException e) {
-ConcurrentLog.warn("MapDataMining", e.getMessage());
+ConcurrentLog.warn("KELONDRO", "MapDataMining: " + e.getMessage());
 break;
 }
 if (map == null) break;

@@ -486,7 +486,7 @@ public class MapHeap implements Map<byte[], Map<String, String>> {
 try {
 map = get(nextKey, false);
 } catch (final IOException e) {
-ConcurrentLog.warn("MapDataMining", e.getMessage());
+ConcurrentLog.warn("KELONDRO", "MapDataMining: " + e.getMessage());
 continue;
 } catch (final SpaceExceededException e) {
 ConcurrentLog.logException(e);

@@ -184,7 +184,7 @@ public class URIMetadataNode extends SolrDocument /* implements Comparable<URIMe
 final String myhash = ASCII.String(this.url.hash());
 if (!hashstr.equals(myhash)) {
 this.setField(CollectionSchema.id.getSolrFieldName(), myhash);
-ConcurrentLog.fine("URIMetadataNode", "updated document.ID of " + urlRaw + " from " + hashstr + " to " + myhash);
+ConcurrentLog.fine("KELONDRO", "URIMetadataNode: updated document.ID of " + urlRaw + " from " + hashstr + " to " + myhash);
 // ususally the hosthash matches but just to be on the safe site
 final String hostidstr = getString(CollectionSchema.host_id_s); // id or empty string
 if (!hostidstr.isEmpty() && !hostidstr.equals(this.url.hosthash())) {
@@ -704,14 +704,14 @@
 public static URIMetadataNode importEntry(final String propStr, String collection) {
 if (propStr == null || propStr.isEmpty() || propStr.charAt(0) != '{' || !propStr.endsWith("}")) {
-ConcurrentLog.severe("URIMetadataNode", "importEntry: propStr is not proper: " + propStr);
+ConcurrentLog.severe("KELONDRO", "URIMetadataNode: importEntry: propStr is not proper: " + propStr);
 return null;
 }
 try {
 return new URIMetadataNode(MapTools.s2p(propStr.substring(1, propStr.length() - 1)), collection);
 } catch (final kelondroException | MalformedURLException e) {
 // wrong format
-ConcurrentLog.severe("URIMetadataNode", e.getMessage());
+ConcurrentLog.severe("KELONDRO", "URIMetadataNode: " + e.getMessage());
 return null;
 }
 }

@@ -57,10 +57,10 @@ public class Word {
 static {
 try {
 hashCache = new ConcurrentARC<String, byte[]>(hashCacheSize, Math.min(32, 2 * Runtime.getRuntime().availableProcessors()));
-ConcurrentLog.info("Word", "hashCache.size = " + hashCacheSize);
+ConcurrentLog.info("KELONDRO", "Word: hashCache.size = " + hashCacheSize);
 } catch (final OutOfMemoryError e) {
 hashCache = new ConcurrentARC<String, byte[]>(1000, Math.min(8, 1 + Runtime.getRuntime().availableProcessors()));
-ConcurrentLog.info("Word", "hashCache.size = " + 1000);
+ConcurrentLog.info("KELONDRO", "Word: hashCache.size = " + 1000);
 }
 }

@@ -593,7 +593,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
 p--;
 worker[p % cores0].add(this.container.get(p, false));
 if (p % 100 == 0 && System.currentTimeMillis() > timeout) {
-ConcurrentLog.warn("TransformDistributor", "distribution of WordReference entries to worker queues ended with timeout = " + this.maxtime);
+ConcurrentLog.warn("KELONDRO", "TransformDistributor: distribution of WordReference entries to worker queues ended with timeout = " + this.maxtime);
 break;
 }
 }
@@ -645,7 +645,7 @@
 while ((entry = this.in.take()) != WordReferenceRow.poisonRowEntry) {
 this.out.put(new WordReferenceVars(new WordReferenceRow(entry), local));
 if (System.currentTimeMillis() > timeout) {
-ConcurrentLog.warn("TransformWorker", "normalization of row entries from row to vars ended with timeout = " + this.maxtime);
+ConcurrentLog.warn("KELONDRO", "TransformWorker: normalization of row entries from row to vars ended with timeout = " + this.maxtime);
 break;
 }
 }

@ -311,7 +311,7 @@ public final class Row implements Serializable {
try { try {
setCol(col.encoder, this.offset + clstrt, col.cellwidth, NumberTools.parseLongDecSubstring(token, p + 1)); setCol(col.encoder, this.offset + clstrt, col.cellwidth, NumberTools.parseLongDecSubstring(token, p + 1));
} catch (final NumberFormatException e) { } catch (final NumberFormatException e) {
ConcurrentLog.severe("kelondroRow", "NumberFormatException for celltype_cardinal, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'"); ConcurrentLog.severe("KELONDRO", "kelondroRow: NumberFormatException for celltype_cardinal, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
setCol(col.encoder, this.offset + clstrt, col.cellwidth, 0); setCol(col.encoder, this.offset + clstrt, col.cellwidth, 0);
} }
} else if ((decimalCardinal) && (col.celltype == Column.celltype_binary)) { } else if ((decimalCardinal) && (col.celltype == Column.celltype_binary)) {
@ -319,7 +319,7 @@ public final class Row implements Serializable {
try { try {
setCol(clstrt, col.cellwidth, new byte[]{(byte) NumberTools.parseIntDecSubstring(token, p + 1)}); setCol(clstrt, col.cellwidth, new byte[]{(byte) NumberTools.parseIntDecSubstring(token, p + 1)});
} catch (final NumberFormatException e) { } catch (final NumberFormatException e) {
ConcurrentLog.severe("kelondroRow", "NumberFormatException for celltype_binary, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'"); ConcurrentLog.severe("KELONDRO", "kelondroRow: NumberFormatException for celltype_binary, celltype = " + col.celltype + ", encoder = " + col.encoder + ", value = '" + token.substring(p + 1).trim() + "'");
setCol(clstrt, col.cellwidth, new byte[]{0}); setCol(clstrt, col.cellwidth, new byte[]{0});
} }
} else if ((decimalCardinal) && (col.celltype == Column.celltype_bitfield)) { } else if ((decimalCardinal) && (col.celltype == Column.celltype_bitfield)) {

@ -106,7 +106,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
this.rowdef = rowdef; this.rowdef = rowdef;
this.chunkcount = (int) exportedCollection.getColLong(exp_chunkcount); this.chunkcount = (int) exportedCollection.getColLong(exp_chunkcount);
if ((this.chunkcount > chunkcachelength / rowdef.objectsize)) { if ((this.chunkcount > chunkcachelength / rowdef.objectsize)) {
ConcurrentLog.warn("RowCollection", "corrected wrong chunkcount; chunkcount = " + this.chunkcount + ", chunkcachelength = " + chunkcachelength + ", rowdef.objectsize = " + rowdef.objectsize); ConcurrentLog.warn("KELONDRO", "RowCollection: corrected wrong chunkcount; chunkcount = " + this.chunkcount + ", chunkcachelength = " + chunkcachelength + ", rowdef.objectsize = " + rowdef.objectsize);
this.chunkcount = chunkcachelength / rowdef.objectsize; // patch problem this.chunkcount = chunkcachelength / rowdef.objectsize; // patch problem
} }
this.lastTimeWrote = (exportedCollection.getColLong(exp_last_wrote) + 10957) * day; this.lastTimeWrote = (exportedCollection.getColLong(exp_last_wrote) + 10957) * day;
@ -122,7 +122,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
throw new kelondroException("old collection order does not match with new order; objectOrder.signature = " + rowdef.objectOrder.signature() + ", oldOrder.signature = " + oldOrder.signature()); throw new kelondroException("old collection order does not match with new order; objectOrder.signature = " + rowdef.objectOrder.signature() + ", oldOrder.signature = " + oldOrder.signature());
this.sortBound = (int) exportedCollection.getColLong(exp_order_bound); this.sortBound = (int) exportedCollection.getColLong(exp_order_bound);
if (this.sortBound > this.chunkcount) { if (this.sortBound > this.chunkcount) {
ConcurrentLog.warn("RowCollection", "corrected wrong sortBound; sortBound = " + this.sortBound + ", chunkcount = " + this.chunkcount); ConcurrentLog.warn("KELONDRO", "RowCollection: corrected wrong sortBound; sortBound = " + this.sortBound + ", chunkcount = " + this.chunkcount);
this.sortBound = this.chunkcount; this.sortBound = this.chunkcount;
} }
this.chunkcache = exportedCollection.getColBytes(exp_collection, false); this.chunkcache = exportedCollection.getColBytes(exp_collection, false);
@ -860,7 +860,7 @@ public class RowCollection implements Sortable<Row.Entry>, Iterable<Row.Entry>,
i--; i--;
} }
} catch (final RuntimeException e) { } catch (final RuntimeException e) {
ConcurrentLog.warn("kelondroRowCollection", e.getMessage(), e); ConcurrentLog.warn("KELONDRO", "kelondroRowCollection: " + e.getMessage(), e);
} finally { } finally {
if (!u) sort(); if (!u) sort();
} }

@ -100,7 +100,7 @@ public class RowSet extends RowCollection implements Index, Iterable<Row.Entry>,
} }
//assert b.length - exportOverheadSize == size * rowdef.objectsize : "b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize; //assert b.length - exportOverheadSize == size * rowdef.objectsize : "b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize;
if (b.length - exportOverheadSize != alloc) { if (b.length - exportOverheadSize != alloc) {
ConcurrentLog.severe("RowSet", "exportOverheadSize wrong: b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize); ConcurrentLog.severe("KELONDRO", "RowSet: exportOverheadSize wrong: b.length = " + b.length + ", size * rowdef.objectsize = " + size * rowdef.objectsize);
return new RowSet(rowdef, 0); return new RowSet(rowdef, 0);
} }
System.arraycopy(b, (int) exportOverheadSize, chunkcache, 0, chunkcache.length); System.arraycopy(b, (int) exportOverheadSize, chunkcache, 0, chunkcache.length);
@ -509,13 +509,13 @@ public class RowSet extends RowCollection implements Index, Iterable<Row.Entry>,
try { try {
c0.sort(); c0.sort();
} catch (final Throwable e) { } catch (final Throwable e) {
ConcurrentLog.severe("RowSet", "collection corrupted. cleaned. " + e.getMessage(), e); ConcurrentLog.severe("KELONDRO", "RowSet: collection corrupted. cleaned. " + e.getMessage(), e);
c0.clear(); c0.clear();
} }
try { try {
c1.sort(); c1.sort();
} catch (final Throwable e) { } catch (final Throwable e) {
ConcurrentLog.severe("RowSet", "collection corrupted. cleaned. " + e.getMessage(), e); ConcurrentLog.severe("KELONDRO", "RowSet: collection corrupted. cleaned. " + e.getMessage(), e);
c1.clear(); c1.clear();
} }
int c0i = 0, c1i = 0; int c0i = 0, c1i = 0;

@ -54,7 +54,7 @@ public abstract class AbstractReference implements Reference {
try { try {
return d == 0 ? 0 : d / positions().size(); return d == 0 ? 0 : d / positions().size();
} catch (ArithmeticException ex) { } catch (ArithmeticException ex) {
ConcurrentLog.fine("AbstractReference", "word distance calculation:" + ex.getMessage()); ConcurrentLog.fine("KELONDRO", "AbstractReference: word distance calculation:" + ex.getMessage());
return 0; return 0;
} }
} }

@ -48,7 +48,7 @@ import net.yacy.kelondro.util.MemoryControl;
*/ */
public class IODispatcher extends Thread { public class IODispatcher extends Thread {
private static final ConcurrentLog log = new ConcurrentLog("IODispatcher"); private static final ConcurrentLog log = new ConcurrentLog("KELONDRO IODispatcher");
private Semaphore controlQueue; // controls that only one io job is running private Semaphore controlQueue; // controls that only one io job is running
private final Semaphore termination; // released if thread is safe to terminate private final Semaphore termination; // released if thread is safe to terminate

@ -487,7 +487,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashes)); reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashes));
} catch (final SpaceExceededException e) { } catch (final SpaceExceededException e) {
reduced = 0; reduced = 0;
ConcurrentLog.warn("IndexCell", "not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment"); ConcurrentLog.warn("KELONDRO", "IndexCell: not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
} }
//assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem(); //assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem();
return removed + (reduced / this.array.rowdef().objectsize); return removed + (reduced / this.array.rowdef().objectsize);
@ -503,7 +503,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashBytes)); reduced = this.array.reduce(termHash, new RemoveReducer<ReferenceType>(urlHashBytes));
} catch (final SpaceExceededException e) { } catch (final SpaceExceededException e) {
reduced = 0; reduced = 0;
ConcurrentLog.warn("IndexCell", "not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment"); ConcurrentLog.warn("KELONDRO", "IndexCell: not possible to remove urlHashes from a RWI because of too low memory. Remove was not applied. Please increase RAM assignment");
} }
//assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem(); //assert this.array.mem() <= am : "am = " + am + ", array.mem() = " + this.array.mem();
return removed || (reduced > 0); return removed || (reduced > 0);

@ -310,14 +310,14 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
int k = 1; int k = 1;
ReferenceContainer<ReferenceType> c = new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(a, this.factory.getRow())); ReferenceContainer<ReferenceType> c = new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(a, this.factory.getRow()));
if (System.currentTimeMillis() > timeout) { if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in get() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME); ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timout in get() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c; return c;
} }
while (entries.hasNext()) { while (entries.hasNext()) {
c = c.merge(new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(entries.next(), this.factory.getRow()))); c = c.merge(new ReferenceContainer<ReferenceType>(this.factory, termHash, RowSet.importRowSet(entries.next(), this.factory.getRow())));
k++; k++;
if (System.currentTimeMillis() > timeout) { if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in get() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME); ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timout in get() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c; return c;
} }
} }
@ -333,7 +333,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
int c = RowSet.importRowCount(a, this.factory.getRow()); int c = RowSet.importRowCount(a, this.factory.getRow());
assert c >= 0; assert c >= 0;
if (System.currentTimeMillis() > timeout) { if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in count() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME); ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timeout in count() (1): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c; return c;
} }
while (entries.hasNext()) { while (entries.hasNext()) {
@ -341,7 +341,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
assert c >= 0; assert c >= 0;
k++; k++;
if (System.currentTimeMillis() > timeout) { if (System.currentTimeMillis() > timeout) {
ConcurrentLog.warn("ReferenceContainerArray", "timout in count() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME); ConcurrentLog.warn("KELONDRO", "ReferenceContainerArray: timeout in count() (2): " + k + " tables searched. timeout = " + METHOD_MAXRUNTIME);
return c; return c;
} }
} }
@ -402,7 +402,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkBestSmallFiles(final IODispatcher merger, final long targetFileSize) { public boolean shrinkBestSmallFiles(final IODispatcher merger, final long targetFileSize) {
final File[] ff = this.array.unmountBestMatch(2.0f, targetFileSize); final File[] ff = this.array.unmountBestMatch(2.0f, targetFileSize);
if (ff == null) return false; if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink1", "unmountBestMatch(2.0, " + targetFileSize + ")"); ConcurrentLog.info("KELONDRO", "RICELL-shrink1: unmountBestMatch(2.0, " + targetFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile()); merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true; return true;
} }
@ -410,7 +410,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkAnySmallFiles(final IODispatcher merger, final long targetFileSize) { public boolean shrinkAnySmallFiles(final IODispatcher merger, final long targetFileSize) {
final File[] ff = this.array.unmountSmallest(targetFileSize); final File[] ff = this.array.unmountSmallest(targetFileSize);
if (ff == null) return false; if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink2", "unmountSmallest(" + targetFileSize + ")"); ConcurrentLog.info("KELONDRO", "RICELL-shrink2: unmountSmallest(" + targetFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile()); merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true; return true;
} }
@ -418,7 +418,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkUpToMaxSizeFiles(final IODispatcher merger, final long maxFileSize) { public boolean shrinkUpToMaxSizeFiles(final IODispatcher merger, final long maxFileSize) {
final File[] ff = this.array.unmountBestMatch(2.0f, maxFileSize); final File[] ff = this.array.unmountBestMatch(2.0f, maxFileSize);
if (ff == null) return false; if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink3", "unmountBestMatch(2.0, " + maxFileSize + ")"); ConcurrentLog.info("KELONDRO", "RICELL-shrink3: unmountBestMatch(2.0, " + maxFileSize + ")");
merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile()); merger.merge(ff[0], ff[1], this.factory, this.array, newContainerBLOBFile());
return true; return true;
} }
@ -426,7 +426,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
public boolean shrinkOldFiles(final IODispatcher merger) { public boolean shrinkOldFiles(final IODispatcher merger) {
final File ff = this.array.unmountOldest(); final File ff = this.array.unmountOldest();
if (ff == null) return false; if (ff == null) return false;
ConcurrentLog.info("RICELL-shrink4/rewrite", "unmountOldest()"); ConcurrentLog.info("KELONDRO", "RICELL-shrink4/rewrite: unmountOldest()");
merger.merge(ff, null, this.factory, this.array, newContainerBLOBFile()); merger.merge(ff, null, this.factory, this.array, newContainerBLOBFile());
return true; return true;
} }

@ -60,7 +60,7 @@ import net.yacy.kelondro.util.FileUtils;
*/ */
public final class ReferenceContainerCache<ReferenceType extends Reference> extends AbstractIndex<ReferenceType> implements Index<ReferenceType>, IndexReader<ReferenceType>, Iterable<ReferenceContainer<ReferenceType>> { public final class ReferenceContainerCache<ReferenceType extends Reference> extends AbstractIndex<ReferenceType> implements Index<ReferenceType>, IndexReader<ReferenceType>, Iterable<ReferenceContainer<ReferenceType>> {
private static final ConcurrentLog log = new ConcurrentLog("ReferenceContainerCache"); private static final ConcurrentLog log = new ConcurrentLog("KELONDRO");
private final int termSize; private final int termSize;
private final ByteOrder termOrder; private final ByteOrder termOrder;

@ -67,15 +67,15 @@ public class ReferenceIterator <ReferenceType extends Reference> extends LookAhe
try { try {
row = RowSet.importRowSet(entry.getValue(), this.factory.getRow()); row = RowSet.importRowSet(entry.getValue(), this.factory.getRow());
if (row == null) { if (row == null) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because importRowSet returned null"); ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because importRowSet returned null");
continue; // that's a failure, but not as bad as letting the whole method crash here continue; // that's a failure, but not as bad as letting the whole method crash here
} }
return new ReferenceContainer<ReferenceType>(this.factory, entry.getKey(), row); return new ReferenceContainer<ReferenceType>(this.factory, entry.getKey(), row);
} catch (final SpaceExceededException e) { } catch (final SpaceExceededException e) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because of too low memory: " + e.toString()); ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because of too low memory: " + e.toString());
continue; continue;
} catch (final Throwable e) { } catch (final Throwable e) {
ConcurrentLog.severe("ReferenceIterator", "lost entry '" + UTF8.String(entry.getKey()) + "' because of error: " + e.toString()); ConcurrentLog.severe("KELONDRO", "ReferenceIterator: lost entry '" + UTF8.String(entry.getKey()) + "' because of error: " + e.toString());
continue; continue;
} }
} }
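For classes that keep using the static helpers, the calling convention after this change is the one visible throughout the diff: the first argument is the shared "KELONDRO" prefix, and the old per-class tag is prepended to the message string. A hedged sketch, assuming YaCy's ConcurrentLog class is on the classpath (the package path net.yacy.cora.util and the class/message names are assumptions for illustration):

import net.yacy.cora.util.ConcurrentLog;

// Sketch only: uses just the static signatures that appear in this diff,
// info(prefix, message) and severe(prefix, message, throwable).
public class KelondroLogCallSketch {
    public static void main(final String[] args) {
        ConcurrentLog.info("KELONDRO", "ExampleClass: cache initialized");
        try {
            throw new IllegalStateException("demo failure");
        } catch (final IllegalStateException e) {
            ConcurrentLog.severe("KELONDRO", "ExampleClass: " + e.getMessage(), e);
        }
    }
}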

@ -181,7 +181,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try { try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(element.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime(); d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(element.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime();
} catch (final ParseException e) { } catch (final ParseException e) {
ConcurrentLog.severe("SplitTable", "", e); ConcurrentLog.severe("KELONDRO", "SplitTable: ", e);
continue; continue;
} }
time = d.getTime(); time = d.getTime();
@ -218,7 +218,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
// open next biggest table // open next biggest table
t.remove(maxf); t.remove(maxf);
f = new File(this.path, maxf); f = new File(this.path, maxf);
ConcurrentLog.info("kelondroSplitTable", "opening partial eco table " + f); ConcurrentLog.info("KELONDRO", "SplitTable: opening partial eco table " + f);
Table table; Table table;
try { try {
table = new Table(f, this.rowdef, EcoFSBufferSize, 0, this.useTailCache, this.exceed134217727, false); table = new Table(f, this.rowdef, EcoFSBufferSize, 0, this.useTailCache, this.exceed134217727, false);
@ -226,7 +226,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try { try {
table = new Table(f, this.rowdef, 0, 0, false, this.exceed134217727, false); table = new Table(f, this.rowdef, 0, 0, false, this.exceed134217727, false);
} catch (final SpaceExceededException ee) { } catch (final SpaceExceededException ee) {
ConcurrentLog.severe("SplitTable", "Table " + f.toString() + " cannot be initialized: " + ee.getMessage(), ee); ConcurrentLog.severe("KELONDRO", "SplitTable: Table " + f.toString() + " cannot be initialized: " + ee.getMessage(), ee);
continue maxfind; continue maxfind;
} }
} }
@ -373,7 +373,7 @@ public class SplitTable implements Index, Iterable<Row.Entry> {
try { try {
d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(name.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime().getTime(); d = GenericFormatter.SHORT_MILSEC_FORMATTER.parse(name.substring(this.prefix.length() + 1, this.prefix.length() + 18), 0).getTime().getTime();
} catch (final ParseException e) { } catch (final ParseException e) {
ConcurrentLog.severe("SplitTable", "", e); ConcurrentLog.severe("KELONDRO", "SplitTable", e);
d = 0; d = 0;
} }
if (d + this.fileAgeLimit < t || new File(this.path, name).length() >= this.fileSizeLimit) { if (d + this.fileAgeLimit < t || new File(this.path, name).length() >= this.fileSizeLimit) {

@ -71,7 +71,7 @@ import net.yacy.kelondro.util.kelondroException;
public class Table implements Index, Iterable<Row.Entry> { public class Table implements Index, Iterable<Row.Entry> {
// static tracker objects // static tracker objects
private final static ConcurrentLog log = new ConcurrentLog("TABLE"); private final static ConcurrentLog log = new ConcurrentLog("KELONDRO TABLE");
/** Map all active table instances by file name */ /** Map all active table instances by file name */
private final static Map<String, Table> tableTracker = new ConcurrentSkipListMap<String, Table>(); private final static Map<String, Table> tableTracker = new ConcurrentSkipListMap<String, Table>();

@ -20,16 +20,24 @@ else
ID="id" ID="id"
fi fi
# if the java executable was not found using 'which', try 'command -v'
if [ ! -x "$JAVA" ]
then
JAVA="`command -v java`"
fi
if [ ! -x "$JAVA" ] if [ ! -x "$JAVA" ]
then then
echo "The java command is not executable." echo "The java command is not executable."
echo "Either you have not installed java or it is not in your PATH" echo "Either you have not installed java or it is not in your PATH."
echo "You can also set a path to java manually, in \$JAVA option of $0 script."
#Cron supports setting the path in #Cron supports setting the path in
#echo "Has this script been invoked by CRON?" #echo "Has this script been invoked by CRON?"
#echo "if so, please set PATH in the crontab, or set the correct path in the variable in this script." #echo "if so, please set PATH in the crontab, or set the correct path in the variable in this script."
exit 1 exit 1
fi fi
usage() { usage() {
cat - <<USAGE cat - <<USAGE
startscript for YaCy on UNIX-like systems startscript for YaCy on UNIX-like systems
