*) Corrections for code cleanup 1175

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@1179 6c8d7289-2bf4-0310-a012-ef5d649a1542
theli 20 years ago
parent 0e88ba997e
commit 8862b6ba4b

plasmaWordIndexDistribution.java

@@ -74,9 +74,9 @@ public final class plasmaWordIndexDistribution {
     private boolean enabled;
     private boolean enabledWhileCrawling;
     private boolean closed;
-    private boolean gzipBody;
-    private int timeout;
-    private int maxOpenFiles;
+    private boolean gzipBody4Distribution;
+    private int timeout4Distribution;
+    private int maxOpenFiles4Distribution;
     public transferIndexThread transferIdxThread = null;

@@ -97,9 +97,9 @@ public final class plasmaWordIndexDistribution {
         this.log = log;
         this.closed = false;
         setCounts(100 /*indexCount*/, 1 /*juniorPeerCount*/, 3 /*seniorPeerCount*/, 8000);
-        this.gzipBody = gzipBody;
-        this.timeout = timeout;
-        this.maxOpenFiles = maxOpenFiles;
+        this.gzipBody4Distribution = gzipBody;
+        this.timeout4Distribution = timeout;
+        this.maxOpenFiles4Distribution = maxOpenFiles;
     }
     public void enable() {
@@ -200,7 +200,7 @@ public final class plasmaWordIndexDistribution {
         // collect index
         String startPointHash = selectTransferStart();
         log.logFine("Selected hash " + startPointHash + " as start point for index distribution, distance = " + yacyDHTAction.dhtDistance(yacyCore.seedDB.mySeed.hash, startPointHash));
-        Object[] selectResult = selectTransferIndexes(startPointHash, indexCount, this.maxOpenFiles);
+        Object[] selectResult = selectTransferIndexes(startPointHash, indexCount, this.maxOpenFiles4Distribution);
         plasmaWordIndexEntity[] indexEntities = (plasmaWordIndexEntity[]) selectResult[0];
         //Integer openedFiles = (Integer) selectResult[2];
         HashMap urlCache = (HashMap) selectResult[1]; // String (url-hash) / plasmaCrawlLURL.Entry

@@ -235,7 +235,7 @@ public final class plasmaWordIndexDistribution {
             ((avdist = (yacyDHTAction.dhtDistance(seed.hash, indexEntities[0].wordHash()) +
                         yacyDHTAction.dhtDistance(seed.hash, indexEntities[indexEntities.length-1].wordHash())) / 2.0) < 0.3)) {
                 start = System.currentTimeMillis();
-                error = yacyClient.transferIndex(seed, indexEntities, urlCache, this.gzipBody, this.timeout);
+                error = yacyClient.transferIndex(seed, indexEntities, urlCache, this.gzipBody4Distribution, this.timeout4Distribution);
                 if (error == null) {
                     log.logInfo("Index transfer of " + indexCount + " words [" + indexEntities[0].wordHash() + " .. " + indexEntities[indexEntities.length-1].wordHash() + "]/" +
                                 avdist + " to peer " + seed.getName() + ":" + seed.hash + " in " +
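Both the distribution code above and the worker thread further down end up in the same client call; as a minimal sketch of that calling pattern as it appears in this diff (a null return means the transfer succeeded, any other value is the error string that gets logged):

    // Sketch of the transfer call as used in this commit; the renamed
    // *4Distribution fields replace the old gzipBody/timeout fields.
    String error = yacyClient.transferIndex(seed, indexEntities, urlCache,
            this.gzipBody4Distribution, this.timeout4Distribution);
    if (error == null) {
        // success: the caller logs the transferred word range and the timing
    } else {
        // failure: 'error' carries the reason reported for the failed transfer
    }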
@@ -489,6 +489,10 @@ public final class plasmaWordIndexDistribution {
     }
     private class transferIndexWorkerThread extends Thread{
+        // connection properties
+        private boolean gzipBody4Transfer = false;
+        private int timeout4Transfer = 60000;
         // status fields
         private boolean finished = false;

@@ -514,12 +518,16 @@ public final class plasmaWordIndexDistribution {
                 yacySeed seed,
                 plasmaWordIndexEntity[] indexEntities,
                 HashMap urlCache,
+                boolean gzipBody,
+                int timeout,
                 long iteration,
                 int idxCount,
                 int chunkSize,
                 String endPointHash,
                 String startPointHash) {
             super(new ThreadGroup("TransferIndexThreadGroup"),"TransferIndexWorker_" + seed.getName());
+            this.gzipBody4Transfer = gzipBody;
+            this.timeout4Transfer = timeout;
             this.iteration = iteration;
             this.seed = seed;
             this.indexEntities = indexEntities;

@@ -585,7 +593,7 @@ public final class plasmaWordIndexDistribution {
             // transfering seleted words to remote peer
             this.status = "Running: Transfering chunk " + iteration;
-            String error = yacyClient.transferIndex(seed, indexEntities, urlCache, gzipBody, timeout);
+            String error = yacyClient.transferIndex(seed, indexEntities, urlCache, gzipBody4Transfer, timeout4Transfer);
             if (error == null) {
                 // words successfully transfered
                 transferTime = System.currentTimeMillis() - start;
@@ -653,6 +661,9 @@ public final class plasmaWordIndexDistribution {
         private yacySeed seed = null;
         private boolean delete = false;
         private boolean finished = false;
+        private boolean gzipBody4Transfer = false;
+        private int timeout4Transfer = 60000;
+        private int maxOpenFiles4Transfer = 800;
         private int transferedEntryCount = 0;
         private int transferedEntityCount = 0;
         private String status = "Running";

@@ -669,9 +680,9 @@ public final class plasmaWordIndexDistribution {
             this.delete = delete;
             this.sb = plasmaSwitchboard.getSwitchboard();
             this.initialWordsDBSize = sb.wordIndex.size();
-            //gzipBody = "true".equalsIgnoreCase(sb.getConfig("indexTransfer.gzipBody","false"));
-            //timeout = (int) sb.getConfigLong("indexTransfer.timeout",60000);
-            //this.maxOpenFiles = (int) sb.getConfigLong("indexTransfer.maxOpenFiles",800);
+            this.gzipBody4Transfer = "true".equalsIgnoreCase(sb.getConfig("indexTransfer.gzipBody","false"));
+            this.timeout4Transfer = (int) sb.getConfigLong("indexTransfer.timeout",60000);
+            this.maxOpenFiles4Transfer = (int) sb.getConfigLong("indexTransfer.maxOpenFiles",800);
         }
         public void run() {
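The worker-side settings now come from the switchboard configuration via the getConfig/getConfigLong calls above. Assuming the usual key=value format of the YaCy configuration file, the corresponding entries would look roughly like this (the key names are taken from the calls above; the values shown are just the in-code defaults):

    # indexTransfer.* settings read by transferIndexThread (illustrative defaults)
    indexTransfer.gzipBody = false
    indexTransfer.timeout = 60000
    indexTransfer.maxOpenFiles = 800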
@@ -765,7 +776,7 @@ public final class plasmaWordIndexDistribution {
                 // selecting 500 words to transfer
                 this.status = "Running: Selecting chunk " + iteration;
-                Object[] selectResult = selectTransferIndexes(this.startPointHash, this.chunkSize, maxOpenFiles - openedFiles.intValue());
+                Object[] selectResult = selectTransferIndexes(this.startPointHash, this.chunkSize, this.maxOpenFiles4Transfer - openedFiles.intValue());
                 newIndexEntities = (plasmaWordIndexEntity[]) selectResult[0];
                 HashMap urlCache = (HashMap) selectResult[1]; // String (url-hash) / plasmaCrawlLURL.Entry
                 openedFiles = (Integer) selectResult[2];

@@ -867,7 +878,7 @@ public final class plasmaWordIndexDistribution {
                 // handover chunk to transfer worker
                 if (!((newIndexEntities == null) || (newIndexEntities.length == 0))) {
-                    worker = new transferIndexWorkerThread(seed,newIndexEntities,urlCache,iteration,idxCount,idxCount,startPointHash,oldStartingPointHash);
+                    worker = new transferIndexWorkerThread(seed,newIndexEntities,urlCache,gzipBody4Transfer,timeout4Transfer,iteration,idxCount,idxCount,startPointHash,oldStartingPointHash);
                     worker.start();
                 }
             }
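Taken together with the extended worker constructor earlier in this diff, the handover now passes the connection properties explicitly; a sketch of the new argument order (the comments are annotations for this sketch, not part of the change):

    worker = new transferIndexWorkerThread(
            seed,                  // target peer
            newIndexEntities,      // selected word index chunks
            urlCache,              // String (url-hash) / plasmaCrawlLURL.Entry
            gzipBody4Transfer,     // new: gzip the body of the transfer request
            timeout4Transfer,      // new: transfer timeout (presumably milliseconds, given the 60000 default)
            iteration, idxCount, idxCount, startPointHash, oldStartingPointHash);
    worker.start();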

serverAbstractSwitch.java

@@ -104,8 +104,8 @@ public abstract class serverAbstractSwitch implements serverSwitch {
         }
         // doing a config settings migration
-        //HashMap migratedSettings = migrateSwitchConfigSettings((HashMap) removedProps);
-        //if (migratedSettings != null) configProps.putAll(migratedSettings);
+        HashMap migratedSettings = migrateSwitchConfigSettings((HashMap) removedProps);
+        if (migratedSettings != null) configProps.putAll(migratedSettings);
         // merge new props from init to config
         // this is necessary for migration, when new properties are attached

@@ -130,7 +130,7 @@ public abstract class serverAbstractSwitch implements serverSwitch {
         serverJobs = 0;
     }
-    /*
     public static HashMap migrateSwitchConfigSettings(HashMap removedSettings) {
         if ((removedSettings == null) || (removedSettings.size() == 0)) return null;
         HashMap migratedSettings = new HashMap();

@@ -145,7 +145,7 @@ public abstract class serverAbstractSwitch implements serverSwitch {
         return migratedSettings;
     }
-    */
     // a logger for this switchboard
     public void setLog(serverLog log) {
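Only the signature, the empty-input guard, and the result map of the re-enabled migrateSwitchConfigSettings are visible in this diff. As a purely illustrative sketch, assuming the helper maps removed (renamed) configuration keys onto their new names, its body could look roughly like the following; the oldToNew table is invented for the example and is not the actual YaCy migration mapping:

    // Illustrative sketch only: the visible parts (signature, guard, result map)
    // are from the diff above; the key mapping below is hypothetical.
    // Uses java.util.HashMap and java.util.Iterator.
    public static HashMap migrateSwitchConfigSettings(HashMap removedSettings) {
        if ((removedSettings == null) || (removedSettings.size() == 0)) return null;
        HashMap migratedSettings = new HashMap();
        HashMap oldToNew = new HashMap();   // hypothetical old-name -> new-name table
        oldToNew.put("someOldSetting", "someNewSetting");
        Iterator oldKeys = removedSettings.keySet().iterator();
        while (oldKeys.hasNext()) {
            String oldKey = (String) oldKeys.next();
            String newKey = (String) oldToNew.get(oldKey);
            if (newKey != null) migratedSettings.put(newKey, removedSettings.get(oldKey));
        }
        return migratedSettings;
    }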
