- removed unused methods

- fixed XSS vulnerability in the peer list in CrawlStartSimple

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4714 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 17 years ago
parent 14384e7a45
commit 512f48e7d6

@@ -135,8 +135,8 @@ public class CrawlStartSimple_p {
                 if (peer == null) peername = record.originator(); else peername = peer.getName();
                 prop.put("otherCrawlStartFinished_" + showedCrawl + "_dark", dark ? "1" : "0");
                 prop.put("otherCrawlStartFinished_" + showedCrawl + "_cre", record.created().toString());
-                prop.put("otherCrawlStartFinished_" + showedCrawl + "_peername", peername);
-                prop.put("otherCrawlStartFinished_" + showedCrawl + "_startURL", record.attributes().get("startURL").toString());
+                prop.putHTML("otherCrawlStartFinished_" + showedCrawl + "_peername", peername);
+                prop.putHTML("otherCrawlStartFinished_" + showedCrawl + "_startURL", record.attributes().get("startURL").toString());
                 prop.put("otherCrawlStartFinished_" + showedCrawl + "_intention", record.attributes().get("intention").toString());
                 prop.put("otherCrawlStartFinished_" + showedCrawl + "_generalDepth", record.attributes().get("generalDepth"));
                 prop.put("otherCrawlStartFinished_" + showedCrawl + "_crawlingQ", (record.attributes().get("crawlingQ").equals("true")) ? "1" : "0");
@@ -162,7 +162,7 @@ public class CrawlStartSimple_p {
             yacySeed seed;
             while ((availcount < maxcount) && (crawlavail.hasNext())) {
                 seed = crawlavail.next();
-                prop.put("remoteCrawlPeers_available_" + availcount + "_name", seed.getName());
+                prop.putHTML("remoteCrawlPeers_available_" + availcount + "_name", seed.getName());
                 prop.put("remoteCrawlPeers_available_" + availcount + "_due", (yacyCore.yacyTime() - seed.available));
                 availcount++;
             }
@@ -170,7 +170,7 @@ public class CrawlStartSimple_p {
             int pendicount = 0;
             while ((pendicount < maxcount) && (crawlpendi.hasNext())) {
                 seed = crawlpendi.next();
-                prop.put("remoteCrawlPeers_busy_" + pendicount + "_name", seed.getName());
+                prop.putHTML("remoteCrawlPeers_busy_" + pendicount + "_name", seed.getName());
                 prop.put("remoteCrawlPeers_busy_" + pendicount + "_due", (yacyCore.yacyTime() - seed.available));
                 pendicount++;
             }
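The three hunks above are the XSS fix named in the commit message: peer names and remote crawl start URLs originate from other peers, so they are now written into the page with putHTML instead of put. A minimal sketch of the kind of HTML escaping such a setter presumably applies; the helper class below illustrates the idea and is not YaCy's actual serverObjects implementation:

// Illustrative only: escape HTML metacharacters so a hostile peer name such as
// "<script>alert('xss')</script>" renders as inert text in the peer list.
public final class HtmlEscapeSketch {
    public static String escapeHTML(String s) {
        if (s == null) return "";
        StringBuilder out = new StringBuilder(s.length() + 16);
        for (int i = 0; i < s.length(); i++) {
            char c = s.charAt(i);
            switch (c) {
                case '&': out.append("&amp;"); break;
                case '<': out.append("&lt;"); break;
                case '>': out.append("&gt;"); break;
                case '"': out.append("&quot;"); break;
                default:  out.append(c);
            }
        }
        return out.toString();
    }

    public static void main(String[] args) {
        // prints: &lt;script&gt;alert('xss')&lt;/script&gt;
        System.out.println(escapeHTML("<script>alert('xss')</script>"));
    }
}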

@@ -160,8 +160,7 @@ public final class transferRWI {
             // block blacklisted entries
             if ((blockBlacklist) && (plasmaSwitchboard.urlBlacklist.hashInBlacklistedCache(indexReferenceBlacklist.BLACKLIST_DHT, urlHash))) {
-                //int deleted = sb.wordIndex.tryRemoveURLs(urlHash);
-                //yacyCore.log.logFine("transferRWI: blocked blacklisted URLHash '" + urlHash + "' from peer " + otherPeerName + "; deleted " + deleted + " URL entries from RWIs");
+                yacyCore.log.logFine("transferRWI: blocked blacklisted URLHash '" + urlHash + "' from peer " + otherPeerName);
                 blocked++;
                 continue;
             }

@@ -138,8 +138,7 @@ public final class transferURL {
                 // check if the entry is blacklisted
                 if ((blockBlacklist) && (plasmaSwitchboard.urlBlacklist.isListed(indexReferenceBlacklist.BLACKLIST_DHT, comp.url()))) {
-                    //int deleted = sb.wordIndex.tryRemoveURLs(lEntry.hash());
-                    //yacyCore.log.logFine("transferURL: blocked blacklisted URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName + "; deleted " + deleted + " URL entries from RWIs");
+                    yacyCore.log.logFine("transferURL: blocked blacklisted URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName);
                     lEntry = null;
                     blocked++;
                     continue;
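Both transfer servlets above share the same receive-side pattern: an entry whose URL or URL hash is blacklisted is logged, counted, and skipped, and the commented-out tryRemoveURLs cleanup is dropped now that the method no longer exists. A self-contained sketch of that filter pattern, with all class and field names invented for illustration (this is not YaCy's API):

import java.util.HashSet;
import java.util.Set;

// Illustrative only: count and skip blacklisted entries instead of indexing them,
// mirroring the blocked++/continue pattern in transferRWI and transferURL above.
final class DhtEntryFilter {
    private final Set<String> blacklistedUrlHashes = new HashSet<String>();
    private int blocked = 0;

    void blacklist(String urlHash) {
        blacklistedUrlHashes.add(urlHash);
    }

    // Returns true if the entry may be indexed; false means it was counted as blocked
    // and the caller should continue with the next entry.
    boolean accept(String urlHash) {
        if (blacklistedUrlHashes.contains(urlHash)) {
            blocked++;
            return false;
        }
        return true;
    }

    int blockedCount() {
        return blocked;
    }
}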

@@ -426,35 +426,6 @@ public final class indexContainerHeap {
         return false;
     }
-    public synchronized int removeReference(String urlHash) {
-        // this tries to delete an index from the cache that has this
-        // urlHash assigned. This can only work if the entry is really fresh
-        // Such entries must be searched in the latest entries
-        assert this.cache != null;
-        assert !this.readOnlyMode;
-        int delCount = 0;
-        Iterator<Map.Entry<String, indexContainer>> i = cache.entrySet().iterator();
-        Map.Entry<String, indexContainer> entry;
-        String wordhash;
-        indexContainer c;
-        while (i.hasNext()) {
-            entry = i.next();
-            wordhash = entry.getKey();
-            // get container
-            c = entry.getValue();
-            if (c.remove(urlHash) != null) {
-                if (c.size() == 0) {
-                    i.remove();
-                } else {
-                    cache.put(wordhash, c); // superfluous?
-                }
-                delCount++;
-            }
-        }
-        return delCount;
-    }
     public synchronized int removeReferences(String wordHash, Set<String> urlHashes) {
         assert this.cache != null;
         assert !this.readOnlyMode;
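The removed removeReference scanned every word container in the RAM cache to purge a single URL hash, which is why its comment notes it can only catch very fresh entries. The surviving removeReferences(String wordHash, Set<String> urlHashes) works per word hash instead; a hypothetical caller-side sketch, in which everything except that signature is a placeholder:

import java.util.HashSet;
import java.util.Set;

final class ReferencePurgeSketch {
    // Hypothetical fallback once removeReference(urlHash) is gone: a caller that already
    // knows which word hashes may still point at the URL can delete per word hash via the
    // surviving removeReferences(String, Set<String>). heap and affectedWordHashes are
    // placeholders for illustration.
    static int purge(indexContainerHeap heap, Iterable<String> affectedWordHashes, String urlHash) {
        Set<String> doomed = new HashSet<String>();
        doomed.add(urlHash);
        int removed = 0;
        for (String wordHash : affectedWordHashes) {
            removed += heap.removeReferences(wordHash, doomed);
        }
        return removed;
    }
}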

@@ -268,10 +268,6 @@ public final class indexRAMRI implements indexRI, indexRIReader {
         return 0;
     }
-    public synchronized int tryRemoveURLs(String urlHash) {
-        return heap.removeReference(urlHash);
-    }
     public synchronized void addEntries(indexContainer container) {
         // this puts the entries into the cache, not into the assortment directly
         if ((container == null) || (container.size() == 0)) return;

@@ -575,16 +575,6 @@ public final class plasmaWordIndex implements indexRI {
         return count;
     }
-    public int tryRemoveURLs(String urlHash) {
-        // this tries to delete an index from the cache that has this
-        // urlHash assigned. This can only work if the entry is really fresh
-        // and can be found in the RAM cache
-        // this returns the number of deletion that had been possible
-        int d = 0;
-        d = dhtInCache.tryRemoveURLs(urlHash);
-        if (d > 0) return d; else return dhtOutCache.tryRemoveURLs(urlHash);
-    }
     public synchronized TreeSet<indexContainer> indexContainerSet(String startHash, boolean ram, boolean rot, int count) {
         // creates a set of indexContainers
         // this does not use the dhtInCache
