*) More debugging output for migrateWords

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@1085 6c8d7289-2bf4-0310-a012-ef5d649a1542
author theli
parent 9b35ae9027
commit ca26aab9b1

@@ -169,6 +169,8 @@ public class zipParser extends AbstractParser implements Parser {
                     docImages);
         } catch (Exception e) {
             throw new ParserException("Unable to parse the zip content. " + e.getMessage());
+        } catch (Error e) {
+            throw new ParserException("Unable to parse the zip content. " + e.getMessage());
         }
     }
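The added catch clause mirrors the existing Exception handler: a java.lang.Error raised while unpacking an archive (for example an OutOfMemoryError on a pathological entry) is now also wrapped in a checked ParserException instead of escaping the parser. Below is a minimal self-contained sketch of the same pattern, assuming plain java.util.zip and a local ParserException stand-in rather than the actual zipParser internals:

    import java.io.InputStream;
    import java.util.zip.ZipInputStream;

    // Hypothetical stand-in for YaCy's ParserException; only here to keep the sketch self-contained.
    class ParserException extends Exception {
        ParserException(String msg) { super(msg); }
    }

    final class ZipScanSketch {
        // Counts the entries of a zip stream, converting both Exceptions and Errors
        // (e.g. an OutOfMemoryError on a huge entry) into the checked ParserException,
        // mirroring the pattern of the zipParser hunk above.
        static int countEntries(InputStream source) throws ParserException {
            try (ZipInputStream zip = new ZipInputStream(source)) {
                int n = 0;
                while (zip.getNextEntry() != null) n++;
                return n;
            } catch (Exception e) {
                throw new ParserException("Unable to parse the zip content. " + e.getMessage());
            } catch (Error e) {
                throw new ParserException("Unable to parse the zip content. " + e.getMessage());
            }
        }
    }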

@@ -550,11 +550,11 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
         }
     }
-    public int migrateWords2Assortment(String wordhash) throws IOException {
+    public Object migrateWords2Assortment(String wordhash) throws IOException {
         // returns the number of entries that had been added to the assortments
         // can be negative if some assortments have been moved to the backend
         File db = plasmaWordIndexEntity.wordHash2path(databaseRoot, wordhash);
-        if (!(db.exists())) return 0;
+        if (!(db.exists())) return "not available";
         plasmaWordIndexEntity entity = null;
         try {
             entity = new plasmaWordIndexEntity(databaseRoot, wordhash, true);
@@ -562,7 +562,7 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
             if (size > assortmentCluster.clusterCapacity) {
                 // this will be too big to integrate it
                 entity.close(); entity = null;
-                return 0;
+                return "too big";
             } else {
                 // take out all words from the assortment to see if it fits
                 // together with the extracted assortment
@@ -571,7 +571,7 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
                     // this will also be too big to integrate, add to entity
                     entity.addEntries(container);
                     entity.close(); entity = null;
-                    return -container.size();
+                    return new Integer(-container.size());
                 } else {
                     // the combined container will fit, read the container
                     try {
@@ -579,7 +579,7 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
                         plasmaWordIndexEntry entry;
                         while (entries.hasNext()) {
                             entry = (plasmaWordIndexEntry) entries.next();
-                            System.out.println("ENTRY = " + entry.getUrlHash());
+                            // System.out.println("ENTRY = " + entry.getUrlHash());
                             container.add(new plasmaWordIndexEntry[]{entry}, System.currentTimeMillis());
                         }
                         // we have read all elements, now delete the entity
@@ -587,12 +587,12 @@ public final class plasmaWordIndexCache implements plasmaWordIndexInterface {
                         entity.close(); entity = null;
                         // integrate the container into the assortments; this will work
                         assortmentCluster.storeTry(wordhash, container);
-                        return size;
+                        return new Integer(size);
                     } catch (kelondroException e) {
                         // database corrupted, we simply give up the database and delete it
                         try {entity.close();} catch (Exception ee) {} entity = null;
                         try {db.delete();} catch (Exception ee) {}
-                        return 0;
+                        return "database corrupted; deleted";
                     }
                 }
             }
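Taken together, the hunks above change migrateWords2Assortment from returning an int to returning an Object: an Integer carries the number of migrated entries (negative when the combined container had to be written back to the word file), while a String carries a skip reason ("not available", "too big", "database corrupted; deleted"). Here is a minimal sketch of that contract from a caller's point of view; the stub class, its branching conditions and the word hash are illustrative only, not YaCy code:

    // Illustrative stub only: mimics the Integer-or-String contract that
    // plasmaWordIndexCache.migrateWords2Assortment exposes after this commit.
    final class MigrationContractSketch {

        // Stand-in for the real method: an Integer is a count of migrated entries,
        // a String is a human-readable reason why the word file was skipped.
        static Object migrateWords2Assortment(String wordhash) {
            if (wordhash == null)   return "not available";        // no word file on disk
            if (wordhash.isEmpty()) return "too big";              // exceeds the assortment cluster capacity
            return Integer.valueOf(wordhash.length());             // number of entries moved to the assortments
        }

        public static void main(String[] args) {
            Object status = migrateWords2Assortment("0123456789AB"); // hypothetical word hash
            if (status instanceof Integer) {
                int count = ((Integer) status).intValue();
                System.out.println("migrated " + count + " entries");
            } else {
                System.out.println("skipped: " + status);
            }
        }
    }

The real method returns new Integer(...) rather than Integer.valueOf(...), presumably because the code predates autoboxing; the dispatch itself is the same instanceof check the caller performs in the next hunk.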

@@ -622,18 +622,23 @@ public final class yacy {
         enumerateFiles words = new enumerateFiles(new File(dbroot, "WORDS"), true, false, true, true);
         String wordhash;
         File wordfile;
-        int migration;
+        Object migrationStatus;
         while (words.hasMoreElements()) try {
             wordfile = (File) words.nextElement();
             wordhash = wordfile.getName().substring(0, 12);
             System.out.println("NOW: " + wordhash);
-            migration = wordIndexCache.migrateWords2Assortment(wordhash);
-            if (migration == 0)
-                log.logInfo("SKIPPED " + wordhash + ": " + ((wordfile.exists()) ? "too big" : "database corrupted; deleted"));
-            else if (migration > 0)
-                log.logInfo("MIGRATED " + wordhash + ": " + migration + " entries");
-            else
-                log.logInfo("REVERSED " + wordhash + ": " + (-migration) + " entries");
+            migrationStatus = wordIndexCache.migrateWords2Assortment(wordhash);
+            if (migrationStatus instanceof Integer) {
+                int migrationCount = ((Integer)migrationStatus).intValue();
+                if (migrationCount == 0)
+                    log.logInfo("SKIPPED " + wordhash + ": empty");
+                else if (migrationCount > 0)
+                    log.logInfo("MIGRATED " + wordhash + ": " + migrationCount + " entries");
+                else
+                    log.logInfo("REVERSED " + wordhash + ": " + (-migrationCount) + " entries");
+            } else if (migrationStatus instanceof String) {
+                log.logInfo("SKIPPED " + wordhash + ": " + migrationStatus);
+            }
         } catch (Exception e) {
             e.printStackTrace();
         }
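For comparison only, and not part of this commit: on Java 16 or later the instanceof-and-cast dispatch in this hunk could be written with pattern matching for instanceof. The sketch below restates the same branching; System.out.println stands in for the log.logInfo calls used above:

    // Illustrative modernization only (Java 16+ pattern matching for instanceof);
    // the branching and message formats are taken from the hunk above.
    final class MigrationLogSketch {
        static void report(String wordhash, Object migrationStatus) {
            if (migrationStatus instanceof Integer count) {
                if (count == 0)
                    System.out.println("SKIPPED " + wordhash + ": empty");
                else if (count > 0)
                    System.out.println("MIGRATED " + wordhash + ": " + count + " entries");
                else
                    System.out.println("REVERSED " + wordhash + ": " + (-count) + " entries");
            } else if (migrationStatus instanceof String reason) {
                System.out.println("SKIPPED " + wordhash + ": " + reason);
            }
        }
    }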
