* convert byte[] hashes to string for log output

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5830 6c8d7289-2bf4-0310-a012-ef5d649a1542
f1ori · 16 years ago · commit 2f860a2564 · parent 94a6c83256
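
For context (an illustration, not part of the commit): concatenating a raw byte[] into a Java string does not print the array's contents; it falls back to the array's default Object.toString() form, something like "[B@1e63e3d". Converting the hash bytes to a String first is what makes the log output readable. A minimal sketch, using a made-up hash value:

    // Illustration only: why raw byte[] hashes need conversion before logging.
    public class HashLogDemo {
        public static void main(String[] args) {
            byte[] termHash = "Ax2bQw9_kDe3".getBytes();          // hypothetical 12-char YaCy word hash
            System.out.println("hash: " + termHash);              // prints e.g. "hash: [B@1e63e3d"
            System.out.println("hash: " + new String(termHash));  // prints "hash: Ax2bQw9_kDe3"
        }
    }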

@@ -167,7 +167,7 @@ public final class BufferedIndexCollection<ReferenceType extends Reference> exte
                     container.addUnique(elm.toKelondroEntry());
                 }
             }
-            if (container.size() < beforeDouble) System.out.println("*** DEBUG DOUBLECHECK - removed " + (beforeDouble - container.size()) + " index entries from word container " + container.getTermHash());
+            if (container.size() < beforeDouble) System.out.println("*** DEBUG DOUBLECHECK - removed " + (beforeDouble - container.size()) + " index entries from word container " + container.getTermHashAsString());
             return container;
         }

@@ -87,6 +87,10 @@ public class ReferenceContainer<ReferenceType extends Reference> extends RowSet
         return termHash;
     }
     
+    public String getTermHashAsString() {
+        return new String(termHash);
+    }
+    
     public void add(final Reference entry) {
         // add without double-occurrence test
         assert entry.toKelondroEntry().objectsize() == super.rowdef.objectsize;
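
With this accessor in place, the remaining hunks below replace new String(container.getTermHash()) and raw byte[] concatenations in log statements with container.getTermHashAsString(); the binary write paths (bos.write(container.getTermHash())) stay on the raw byte[].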

@@ -389,7 +389,7 @@ public class plasmaRankingCRProcess {
         CloneableIterator<Row.Entry> cr_entry;
         while (i.hasNext()) {
             keycollection = i.next();
-            referee = new String(keycollection.getTermHash());
+            referee = keycollection.getTermHashAsString();
             if (referee.length() == 6) refereeDom = referee; else refereeDom = referee.substring(6);
             cr_entry = keycollection.rows();

@@ -703,7 +703,7 @@ public final class plasmaWordIndex {
             }
             if (urlHashs.size() > 0) try {
                 final int removed = index.remove(container.getTermHash(), urlHashs);
-                Log.logFine("INDEXCLEANER", container.getTermHash() + ": " + removed + " of " + container.size() + " URL-entries deleted");
+                Log.logFine("INDEXCLEANER", container.getTermHashAsString() + ": " + removed + " of " + container.size() + " URL-entries deleted");
                 lastWordHash = container.getTermHash();
                 lastDeletionCounter = urlHashs.size();
                 urlHashs.clear();

@@ -199,7 +199,7 @@ public class Dispatcher {
             while (it.hasNext()) {
                 urlHashes.add(it.next().metadataHash());
             }
-            if (this.log.isFine()) this.log.logFine("selected " + urlHashes.size() + " urls for word '" + c.getTermHash() + "'");
+            if (this.log.isFine()) this.log.logFine("selected " + urlHashes.size() + " urls for word '" + c.getTermHashAsString() + "'");
             if (urlHashes.size() > 0) this.backend.remove(c.getTermHash(), urlHashes);
         }

@@ -204,7 +204,7 @@ public class Transmission {
         Iterator<ReferenceContainer<WordReference>> i = this.containers.iterator();
         ReferenceContainer<WordReference> firstContainer = (i == null) ? null : i.next();
         log.logInfo("Index transfer of " + this.containers.size() +
-                    " words [" + ((firstContainer == null) ? null : firstContainer.getTermHash()) + " .. " + new String(this.primaryTarget) + "]" +
+                    " words [" + ((firstContainer == null) ? null : firstContainer.getTermHashAsString()) + " .. " + new String(this.primaryTarget) + "]" +
                     " and " + this.references.size() + " URLs" +
                     " to peer " + target.getName() + ":" + target.hash +
                     " in " + (transferTime / 1000) +

@@ -705,7 +705,7 @@ public final class yacy {
             }
             if (wordCounter%500 == 0) {
-                wordChunkEndHash = new String(wordIdxContainer.getTermHash());
+                wordChunkEndHash = wordIdxContainer.getTermHashAsString();
                 wordChunkEnd = System.currentTimeMillis();
                 final long duration = wordChunkEnd - wordChunkStart;
                 log.logInfo(wordCounter + " words scanned " +
@@ -884,7 +884,7 @@ public final class yacy {
                     bos.write(container.getTermHash());
                     bos.write(serverCore.CRLF);
                     if (counter % 500 == 0) {
-                        log.logInfo("Found " + counter + " Hashs until now. Last found Hash: " + container.getTermHash());
+                        log.logInfo("Found " + counter + " Hashs until now. Last found Hash: " + container.getTermHashAsString());
                     }
                 }
             }
@@ -901,14 +901,14 @@ public final class yacy {
                     bos.write(container.getTermHash());
                     bos.write(serverCore.CRLF);
                     if (counter % 500 == 0) {
-                        log.logInfo("Found " + counter + " Hashs until now. Last found Hash: " + container.getTermHash());
+                        log.logInfo("Found " + counter + " Hashs until now. Last found Hash: " + container.getTermHashAsString());
                     }
                 }
             }
             bos.flush();
             bos.close();
         }
-        log.logInfo("Total number of Hashs: " + counter + ". Last found Hash: " + (container == null ? "null" : container.getTermHash()));
+        log.logInfo("Total number of Hashs: " + counter + ". Last found Hash: " + (container == null ? "null" : container.getTermHashAsString()));
     } catch (final IOException e) {
         log.logSevere("IOException", e);
     }
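
One caveat worth noting (an observation about the approach, not part of the commit): new String(byte[]) decodes with the platform default charset. YaCy term hashes are base64-style ASCII, so this is harmless in practice, but a charset-explicit variant of the accessor would remove the locale dependence. A sketch, assuming the same termHash field in ReferenceContainer:

    // Hypothetical charset-explicit variant of the new accessor.
    public String getTermHashAsString() {
        return new String(termHash, java.nio.charset.Charset.forName("US-ASCII"));
    }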
