diff --git a/htroot/yacy/search.java b/htroot/yacy/search.java
index 26e6b0e8d..7be788821 100644
--- a/htroot/yacy/search.java
+++ b/htroot/yacy/search.java
@@ -178,7 +178,7 @@ public final class search {
         yacyCore.log.logInfo("INIT HASH SEARCH (query-" + abstracts + "): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links");
 
         // make event
-        plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, rankingProfile, sb.wordIndex, null, true, abstractSet);
+        plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, rankingProfile, sb.wordIndex, null, true);
         urlRetrievalAllTime = theSearch.getURLRetrievalTime();
         snippetComputationAllTime = theSearch.getSnippetComputationTime();
diff --git a/htroot/yacysearch.java b/htroot/yacysearch.java
index 9674cfea6..db9f768d0 100644
--- a/htroot/yacysearch.java
+++ b/htroot/yacysearch.java
@@ -293,7 +293,7 @@ public class yacysearch {
             theQuery.setOffset(0); // in case that this is a new search, always start without a offset
             offset = 0;
         }
-        plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, ranking, sb.wordIndex, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false, null);
+        plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, ranking, sb.wordIndex, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false);
 
         // generate result object
         serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER ORDERING OF SEARCH RESULTS: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds");
diff --git a/source/de/anomic/plasma/plasmaCondenser.java b/source/de/anomic/plasma/plasmaCondenser.java
index e2400be1d..6dae035c8 100644
--- a/source/de/anomic/plasma/plasmaCondenser.java
+++ b/source/de/anomic/plasma/plasmaCondenser.java
@@ -660,7 +660,7 @@ public final class plasmaCondenser {
         return ("$%&/()=\"$%&/()=`^+*~#'-_:;,|<>[]\\".indexOf(c) >= 0);
     }
 
-    public static Enumeration wordTokenizer(String s, String charset, int minLength) {
+    public static Enumeration<StringBuffer> wordTokenizer(String s, String charset, int minLength) {
        try {
            return new sievedWordsEnum(new ByteArrayInputStream(s.getBytes()), charset, minLength);
        } catch (Exception e) {
@@ -668,10 +668,10 @@ public final class plasmaCondenser {
        }
    }
 
-    public static class sievedWordsEnum implements Enumeration {
+    public static class sievedWordsEnum implements Enumeration<StringBuffer> {
         // this enumeration removes all words that contain either wrong characters or are too short
 
-        Object buffer = null;
+        StringBuffer buffer = null;
         unsievedWordsEnum e;
         int ml;
@@ -685,7 +685,7 @@ public final class plasmaCondenser {
             e.pre(x);
         }
 
-        private Object nextElement0() {
+        private StringBuffer nextElement0() {
             StringBuffer s;
             char c;
             loop: while (e.hasMoreElements()) {
@@ -709,8 +709,8 @@ public final class plasmaCondenser {
             return buffer != null;
         }
 
-        public Object nextElement() {
-            Object r = buffer;
+        public StringBuffer nextElement() {
+            StringBuffer r = buffer;
             buffer = nextElement0();
             return r;
         }
@@ -794,7 +794,7 @@ public final class plasmaCondenser {
         }
     }
 
-    public static class sentencesFromInputStreamEnum implements Iterator {
+    public static class sentencesFromInputStreamEnum implements Iterator<StringBuffer> {
         // read sentences from a given input stream
         // this enumerates StringBuffer objects
 
@@ -836,7 +836,7 @@ public final class plasmaCondenser {
             return buffer != null;
         }
 
-        public Object next() {
+        public StringBuffer next() {
             if (buffer == null) {
                 return null;
             } else {
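Note on the plasmaCondenser change above: once sievedWordsEnum implements Enumeration<StringBuffer>, call sites receive typed elements and can drop their casts. A minimal caller sketch (the input string and minLength value are made up for illustration; only wordTokenizer itself is from the patch):

    // assumes: import java.util.Enumeration;
    Enumeration<StringBuffer> words = plasmaCondenser.wordTokenizer("the quick brown fox", "UTF-8", 3);
    while (words.hasMoreElements()) {
        StringBuffer word = words.nextElement(); // previously: (StringBuffer) words.nextElement()
        System.out.println(word);
    }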
diff --git a/source/de/anomic/plasma/plasmaCrawlStacker.java b/source/de/anomic/plasma/plasmaCrawlStacker.java
index e88ed3085..4253a77ec 100644
--- a/source/de/anomic/plasma/plasmaCrawlStacker.java
+++ b/source/de/anomic/plasma/plasmaCrawlStacker.java
@@ -77,7 +77,7 @@ public final class plasmaCrawlStacker extends Thread {
     // keys for different database types
     public static final int QUEUE_DB_TYPE_RAM  = 0;
     public static final int QUEUE_DB_TYPE_TREE = 1;
-    public static final int QUEUE_DB_TYPE_FLEX = 2;
+    public static final int QUEUE_DB_TYPE_ECO  = 2;
 
     final serverLog log = new serverLog("STACKCRAWL");
 
@@ -282,7 +282,7 @@ public final class plasmaCrawlStacker extends Thread {
         if (this.dbtype == QUEUE_DB_TYPE_RAM) {
             // do nothing..
         }
-        if (this.dbtype == QUEUE_DB_TYPE_FLEX) {
+        if (this.dbtype == QUEUE_DB_TYPE_ECO) {
             new File(cacheStacksPath, stackfile).delete();
             //kelondroFlexWidthArray.delete(cacheStacksPath, stackfile);
         }
@@ -298,7 +298,7 @@ public final class plasmaCrawlStacker extends Thread {
         if (this.dbtype == QUEUE_DB_TYPE_RAM) {
             this.urlEntryCache = new kelondroRowSet(plasmaCrawlEntry.rowdef, 0);
         }
-        if (this.dbtype == QUEUE_DB_TYPE_FLEX) {
+        if (this.dbtype == QUEUE_DB_TYPE_ECO) {
             cacheStacksPath.mkdirs();
             File f = new File(cacheStacksPath, stackfile);
             try {
diff --git a/source/de/anomic/plasma/plasmaParserDocument.java b/source/de/anomic/plasma/plasmaParserDocument.java
index ca1360410..6669f6942 100644
--- a/source/de/anomic/plasma/plasmaParserDocument.java
+++ b/source/de/anomic/plasma/plasmaParserDocument.java
@@ -229,7 +229,7 @@ public class plasmaParserDocument {
         return -1;
     }
 
-    public Iterator getSentences(boolean pre) {
+    public Iterator<StringBuffer> getSentences(boolean pre) {
         if (this.text == null) return null;
         plasmaCondenser.sentencesFromInputStreamEnum e = plasmaCondenser.sentencesFromInputStream(getText(), this.charset);
         e.pre(pre);
@@ -248,8 +248,8 @@ public class plasmaParserDocument {
         if (hs.size() == 0) return "";
         // generate a new list
         StringBuffer sb = new StringBuffer(this.keywords.size() * 6);
-        Iterator i = hs.iterator();
-        while (i.hasNext()) sb.append((String) i.next()).append(separator);
+        Iterator<String> i = hs.iterator();
+        while (i.hasNext()) sb.append(i.next()).append(separator);
         return sb.substring(0, sb.length() - 1);
     }
 
@@ -303,24 +303,23 @@ public class plasmaParserDocument {
 
     private synchronized void resortLinks() {
 
         // extract hyperlinks, medialinks and emaillinks from anchorlinks
-        Iterator i;
         yacyURL url;
         String u;
         int extpos, qpos;
         String ext = null;
-        i = anchors.entrySet().iterator();
+        Iterator<Map.Entry<String, String>> i = anchors.entrySet().iterator();
         hyperlinks = new HashMap();
         videolinks = new HashMap();
         audiolinks = new HashMap();
         applinks = new HashMap();
         emaillinks = new HashMap();
         TreeSet collectedImages = new TreeSet(); // this is a set that is collected now and joined later to the imagelinks
-        Map.Entry entry;
+        Map.Entry<String, String> entry;
         while (i.hasNext()) {
-            entry = (Map.Entry) i.next();
-            u = (String) entry.getKey();
+            entry = i.next();
+            u = entry.getKey();
             if ((u != null) && (u.startsWith("mailto:"))) {
-                emaillinks.put(u.substring(7), (String)entry.getValue());
+                emaillinks.put(u.substring(7), entry.getValue());
             } else {
                 extpos = u.lastIndexOf(".");
                 if (extpos > 0) {
@@ -350,10 +349,10 @@ public class plasmaParserDocument {
         }
 
         // add image links that we collected from the anchors to the image map
-        i = collectedImages.iterator();
+        Iterator j = collectedImages.iterator();
         htmlFilterImageEntry iEntry;
-        while (i.hasNext()) {
-            iEntry = (htmlFilterImageEntry) i.next();
+        while (j.hasNext()) {
+            iEntry = (htmlFilterImageEntry) j.next();
             if (!images.contains(iEntry)) images.add(iEntry);
         }
     }
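The plasmaParserDocument hunk above shows the refactoring pattern that repeats through the rest of this patch: a raw Iterator plus per-element casts becomes an Iterator typed over Map.Entry. A self-contained before/after sketch (hypothetical map contents; assumes java.util imports):

    Map<String, String> anchors = new HashMap<String, String>();
    anchors.put("mailto:alice@example.net", "contact link");
    Iterator<Map.Entry<String, String>> i = anchors.entrySet().iterator();
    while (i.hasNext()) {
        Map.Entry<String, String> entry = i.next();
        String u = entry.getKey();       // previously: (String) entry.getKey()
        String label = entry.getValue(); // previously: (String) entry.getValue()
    }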
diff --git a/source/de/anomic/plasma/plasmaSearchEvent.java b/source/de/anomic/plasma/plasmaSearchEvent.java
index eebd323b2..218b36585 100644
--- a/source/de/anomic/plasma/plasmaSearchEvent.java
+++ b/source/de/anomic/plasma/plasmaSearchEvent.java
@@ -41,6 +41,7 @@ import de.anomic.index.indexRWIEntry;
 import de.anomic.index.indexURLEntry;
 import de.anomic.kelondro.kelondroBitfield;
 import de.anomic.kelondro.kelondroMSetTools;
+import de.anomic.plasma.plasmaSnippetCache.MediaSnippet;
 import de.anomic.server.serverProfiling;
 import de.anomic.server.logging.serverLog;
 import de.anomic.yacy.yacyCore;
@@ -70,7 +71,7 @@ public final class plasmaSearchEvent {
     private Map<String, TreeMap<String, String>> rcAbstracts; // cache for index abstracts; word:TreeMap mapping where the embedded TreeMap is a urlhash:peerlist relation
     private yacySearch[] primarySearchThreads, secondarySearchThreads;
     private Thread localSearchThread;
-    private TreeMap preselectedPeerHashes;
+    private TreeMap<String, String> preselectedPeerHashes;
     //private Object[] references;
     public TreeMap IAResults;
     public TreeMap IACount;
@@ -79,16 +80,16 @@ public final class plasmaSearchEvent {
     private resultWorker[] workerThreads;
     private ArrayList resultList;
     //private int resultListLock; // a pointer that shows that all elements below this pointer are fixed and may not be changed again
-    private HashMap failedURLs; // a mapping from a urlhash to a fail reason string
-    TreeSet snippetFetchWordHashes; // a set of word hashes that are used to match with the snippets
+    private HashMap<String, String> failedURLs; // a mapping from a urlhash to a fail reason string
+    TreeSet<String> snippetFetchWordHashes; // a set of word hashes that are used to match with the snippets
     private long urlRetrievalAllTime;
     private long snippetComputationAllTime;
 
+    @SuppressWarnings("unchecked")
     private plasmaSearchEvent(plasmaSearchQuery query,
                               plasmaWordIndex wordIndex,
-                              TreeMap preselectedPeerHashes,
-                              boolean generateAbstracts,
-                              TreeSet abstractSet) {
+                              TreeMap<String, String> preselectedPeerHashes,
+                              boolean generateAbstracts) {
         this.eventTime = System.currentTimeMillis(); // for lifetime check
         this.wordIndex = wordIndex;
         this.query = query;
@@ -104,14 +105,14 @@ public final class plasmaSearchEvent {
         this.urlRetrievalAllTime = 0;
         this.snippetComputationAllTime = 0;
         this.workerThreads = null;
-        this.resultList = new ArrayList(10); // this is the result set which is filled up with search results, enriched with snippets
+        this.resultList = new ArrayList<ResultEntry>(10); // this is the result set which is filled up with search results, enriched with snippets
         //this.resultListLock = 0; // no locked elements until now
-        this.failedURLs = new HashMap(); // a map of urls to reason strings where a worker thread tried to work on, but failed.
+        this.failedURLs = new HashMap<String, String>(); // a map of urls to reason strings where a worker thread tried to work on, but failed.
 
         // snippets do not need to match with the complete query hashes,
         // only with the query minus the stopwords which had not been used for the search
-        final TreeSet filtered = kelondroMSetTools.joinConstructive(query.queryHashes, plasmaSwitchboard.stopwords);
-        this.snippetFetchWordHashes = (TreeSet) query.queryHashes.clone();
+        final TreeSet<String> filtered = kelondroMSetTools.joinConstructive(query.queryHashes, plasmaSwitchboard.stopwords);
+        this.snippetFetchWordHashes = (TreeSet<String>) query.queryHashes.clone();
         if ((filtered != null) && (filtered.size() > 0)) {
             kelondroMSetTools.excludeDestructive(this.snippetFetchWordHashes, plasmaSwitchboard.stopwords);
         }
@@ -163,15 +164,15 @@ public final class plasmaSearchEvent {
         if (generateAbstracts) {
             // compute index abstracts
             long timer = System.currentTimeMillis();
-            Iterator ci = this.rankedCache.searchContainerMaps()[0].entrySet().iterator();
-            Map.Entry entry;
+            Iterator<Map.Entry<String, indexContainer>> ci = this.rankedCache.searchContainerMaps()[0].entrySet().iterator();
+            Map.Entry<String, indexContainer> entry;
             int maxcount = -1;
             double mindhtdistance = 1.1, d;
             String wordhash;
             while (ci.hasNext()) {
-                entry = (Map.Entry) ci.next();
-                wordhash = (String) entry.getKey();
-                indexContainer container = (indexContainer) entry.getValue();
+                entry = ci.next();
+                wordhash = entry.getKey();
+                indexContainer container = entry.getValue();
                 assert (container.getWordHash().equals(wordhash));
                 if (container.size() > maxcount) {
                     IAmaxcounthash = wordhash;
@@ -256,13 +257,13 @@ public final class plasmaSearchEvent {
 
     public static void cleanupEvents(boolean all) {
         // remove old events in the event cache
-        Iterator i = lastEvents.entrySet().iterator();
+        Iterator<plasmaSearchEvent> i = lastEvents.values().iterator();
         plasmaSearchEvent cleanEvent;
         while (i.hasNext()) {
-            cleanEvent = (plasmaSearchEvent) ((Map.Entry) i.next()).getValue();
+            cleanEvent = i.next();
             if ((all) || (cleanEvent.eventTime + eventLifetime < System.currentTimeMillis())) {
                 // execute deletion of failed words
-                Set removeWords = cleanEvent.query.queryHashes;
+                Set<String> removeWords = cleanEvent.query.queryHashes;
                 removeWords.addAll(cleanEvent.query.excludeHashes);
                 cleanEvent.wordIndex.removeEntriesMultiple(removeWords, cleanEvent.failedURLs.keySet());
                 serverLog.logInfo("SearchEvents", "cleaning up event " + cleanEvent.query.id(true) + ", removed " + cleanEvent.failedURLs.size() + " URL references on " + removeWords.size() + " words");
@@ -315,7 +316,7 @@ public final class plasmaSearchEvent {
         if ((query.constraint != null) &&
             (query.constraint.get(plasmaCondenser.flag_cat_indexof)) &&
             (!(comp.title().startsWith("Index of")))) {
-            final Iterator wi = query.queryHashes.iterator();
+            final Iterator<String> wi = query.queryHashes.iterator();
             while (wi.hasNext()) wordIndex.removeEntry((String) wi.next(), page.hash());
             registerFailure(page.hash(), "index-of constraint not fullfilled");
             return null;
@@ -366,7 +367,7 @@ public final class plasmaSearchEvent {
         } else {
             // attach media information
             startTime = System.currentTimeMillis();
-            ArrayList mediaSnippets = plasmaSnippetCache.retrieveMediaSnippets(comp.url(), snippetFetchWordHashes, query.contentdom, (snippetFetchMode == 2), 6000);
+            ArrayList<MediaSnippet> mediaSnippets = plasmaSnippetCache.retrieveMediaSnippets(comp.url(), snippetFetchWordHashes, query.contentdom, (snippetFetchMode == 2), 6000);
             long snippetComputationTime = System.currentTimeMillis() - startTime;
             serverLog.logInfo("SEARCH_EVENT", "media snippet load time for " + comp.url() + ": " + snippetComputationTime);
@@ -447,13 +448,12 @@ public final class plasmaSearchEvent {
     public static plasmaSearchEvent getEvent(plasmaSearchQuery query,
                                              plasmaSearchRankingProfile ranking,
                                              plasmaWordIndex wordIndex,
-                                             TreeMap preselectedPeerHashes,
-                                             boolean generateAbstracts,
-                                             TreeSet abstractSet) {
+                                             TreeMap<String, String> preselectedPeerHashes,
+                                             boolean generateAbstracts) {
         synchronized (lastEvents) {
             plasmaSearchEvent event = (plasmaSearchEvent) lastEvents.get(query.id(false));
             if (event == null) {
-                event = new plasmaSearchEvent(query, wordIndex, preselectedPeerHashes, generateAbstracts, abstractSet);
+                event = new plasmaSearchEvent(query, wordIndex, preselectedPeerHashes, generateAbstracts);
             } else {
                 //re-new the event time for this event, so it is not deleted next time too early
                 event.eventTime = System.currentTimeMillis();
@@ -634,23 +634,23 @@ public final class plasmaSearchEvent {
             System.out.println("DEBUG-INDEXABSTRACT: hash " + (String) entry.getKey() + ": " + ((query.queryHashes.contains((String) entry.getKey())) ? "NEEDED" : "NOT NEEDED") + "; " + ((TreeMap) entry.getValue()).size() + " entries");
         }
         */
-        TreeMap abstractJoin = (rcAbstracts.size() == query.queryHashes.size()) ? kelondroMSetTools.joinConstructive(rcAbstracts.values(), true) : new TreeMap();
+        TreeMap<String, String> abstractJoin = (rcAbstracts.size() == query.queryHashes.size()) ? kelondroMSetTools.joinConstructive(rcAbstracts.values(), true) : new TreeMap<String, String>();
         if (abstractJoin.size() == 0) {
             //System.out.println("DEBUG-INDEXABSTRACT: no success using index abstracts from remote peers");
         } else {
             //System.out.println("DEBUG-INDEXABSTRACT: index abstracts delivered " + abstractJoin.size() + " additional results for secondary search");
             // generate query for secondary search
-            TreeMap secondarySearchURLs = new TreeMap(); // a (peerhash:urlhash-liststring) mapping
-            Iterator i1 = abstractJoin.entrySet().iterator();
-            Map.Entry entry1;
+            TreeMap<String, String> secondarySearchURLs = new TreeMap<String, String>(); // a (peerhash:urlhash-liststring) mapping
+            Iterator<Map.Entry<String, String>> i1 = abstractJoin.entrySet().iterator();
+            Map.Entry<String, String> entry1;
             String url, urls, peer, peers;
             String mypeerhash = yacyCore.seedDB.mySeed().hash;
             boolean mypeerinvolved = false;
             int mypeercount;
             while (i1.hasNext()) {
-                entry1 = (Map.Entry) i1.next();
-                url = (String) entry1.getKey();
-                peers = (String) entry1.getValue();
+                entry1 = i1.next();
+                url = entry1.getKey();
+                peers = entry1.getValue();
                 //System.out.println("DEBUG-INDEXABSTRACT: url " + url + ": from peers " + peers);
                 mypeercount = 0;
                 for (int j = 0; j < peers.length(); j = j + 12) {
@@ -670,8 +670,8 @@ public final class plasmaSearchEvent {
             secondarySearchThreads = new yacySearch[(mypeerinvolved) ? secondarySearchURLs.size() - 1 : secondarySearchURLs.size()];
             int c = 0;
             while (i1.hasNext()) {
-                entry1 = (Map.Entry) i1.next();
-                peer = (String) entry1.getKey();
+                entry1 = i1.next();
+                peer = entry1.getKey();
                 if (peer.equals(mypeerhash)) continue; // we dont need to ask ourself
                 urls = (String) entry1.getValue();
                 words = wordsFromPeer(peer, urls);
@@ -686,17 +686,17 @@ public final class plasmaSearchEvent {
     }
 
     private String wordsFromPeer(String peerhash, String urls) {
-        Map.Entry entry;
+        Map.Entry<String, TreeMap<String, String>> entry;
         String word, peerlist, url, wordlist = "";
-        TreeMap urlPeerlist;
+        TreeMap<String, String> urlPeerlist;
         int p;
         boolean hasURL;
         synchronized (rcAbstracts) {
-            Iterator i = rcAbstracts.entrySet().iterator();
+            Iterator<Map.Entry<String, TreeMap<String, String>>> i = rcAbstracts.entrySet().iterator();
             while (i.hasNext()) {
-                entry = (Map.Entry) i.next();
-                word = (String) entry.getKey();
-                urlPeerlist = (TreeMap) entry.getValue();
+                entry = i.next();
+                word = entry.getKey();
+                urlPeerlist = entry.getValue();
                 hasURL = true;
                 for (int j = 0; j < urls.length(); j = j + 12) {
                     url = urls.substring(j, j + 12);
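With abstractSet removed from getEvent, index abstracts are computed inside the constructor whenever generateAbstracts is true, so call sites shrink to five arguments. A hypothetical call site matching the yacysearch.java hunk at the top of this patch:

    plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(
            theQuery,                                      // plasmaSearchQuery
            ranking,                                       // plasmaSearchRankingProfile
            sb.wordIndex,                                  // plasmaWordIndex
            sb.isRobinsonMode() ? sb.clusterhashes : null, // TreeMap<String, String> preselectedPeerHashes
            false);                                        // generateAbstracts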
diff --git a/source/de/anomic/plasma/plasmaSearchQuery.java b/source/de/anomic/plasma/plasmaSearchQuery.java
index 78d4b81e2..7b1e3e4df 100644
--- a/source/de/anomic/plasma/plasmaSearchQuery.java
+++ b/source/de/anomic/plasma/plasmaSearchQuery.java
@@ -224,6 +224,7 @@ public plasmaSearchQuery(
         return kelondroMSetTools.anymatch(wordhashes, keyhashes);
     }
 
+    @SuppressWarnings("unchecked")
     public static TreeSet[] cleanQuery(String querystring) {
         // returns two sets: a query set and a exclude set
         if ((querystring == null) || (querystring.length() == 0)) return new TreeSet[]{new TreeSet(kelondroNaturalOrder.naturalComparator), new TreeSet(kelondroNaturalOrder.naturalComparator)};
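The @SuppressWarnings("unchecked") on cleanQuery is needed because the method returns a TreeSet[], and Java forbids creating arrays of a parameterized type. A minimal sketch of the underlying limitation (hypothetical method, not from this codebase; assumes java.util.TreeSet):

    @SuppressWarnings("unchecked")
    static TreeSet<String>[] twoSets() {
        // new TreeSet<String>[2] would not compile: "generic array creation"
        return new TreeSet[] { new TreeSet<String>(), new TreeSet<String>() };
    }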
diff --git a/source/de/anomic/plasma/plasmaSnippetCache.java b/source/de/anomic/plasma/plasmaSnippetCache.java
index 9514567e0..d2a726e1b 100644
--- a/source/de/anomic/plasma/plasmaSnippetCache.java
+++ b/source/de/anomic/plasma/plasmaSnippetCache.java
@@ -64,8 +64,8 @@ import de.anomic.kelondro.kelondroMSetTools;
 import de.anomic.plasma.cache.IResourceInfo;
 import de.anomic.plasma.parser.ParserException;
 import de.anomic.server.logging.serverLog;
-import de.anomic.yacy.yacySearch;
 import de.anomic.yacy.yacyCore;
+import de.anomic.yacy.yacySearch;
 import de.anomic.yacy.yacyURL;
 
 public class plasmaSnippetCache {
@@ -623,14 +623,14 @@ public class plasmaSnippetCache {
         }
     }
 
-    public static ArrayList retrieveMediaSnippets(yacyURL url, Set queryhashes, int mediatype, boolean fetchOnline, int timeout) {
+    public static ArrayList<MediaSnippet> retrieveMediaSnippets(yacyURL url, Set<String> queryhashes, int mediatype, boolean fetchOnline, int timeout) {
         if (queryhashes.size() == 0) {
             serverLog.logFine("snippet fetch", "no query hashes given for url " + url);
-            return new ArrayList();
+            return new ArrayList<MediaSnippet>();
         }
         plasmaParserDocument document = retrieveDocument(url, fetchOnline, timeout, false);
-        ArrayList a = new ArrayList();
+        ArrayList<MediaSnippet> a = new ArrayList<MediaSnippet>();
         if (document != null) {
             if ((mediatype == plasmaSearchQuery.CONTENTDOM_ALL) || (mediatype == plasmaSearchQuery.CONTENTDOM_AUDIO)) a.addAll(computeMediaSnippets(document, queryhashes, plasmaSearchQuery.CONTENTDOM_AUDIO));
             if ((mediatype == plasmaSearchQuery.CONTENTDOM_ALL) || (mediatype == plasmaSearchQuery.CONTENTDOM_VIDEO)) a.addAll(computeMediaSnippets(document, queryhashes, plasmaSearchQuery.CONTENTDOM_VIDEO));
@@ -640,24 +640,24 @@ public class plasmaSnippetCache {
         return a;
     }
 
-    public static ArrayList computeMediaSnippets(plasmaParserDocument document, Set queryhashes, int mediatype) {
+    public static ArrayList<MediaSnippet> computeMediaSnippets(plasmaParserDocument document, Set<String> queryhashes, int mediatype) {
 
-        if (document == null) return new ArrayList();
-        Map media = null;
+        if (document == null) return new ArrayList<MediaSnippet>();
+        Map<String, String> media = null;
         if (mediatype == plasmaSearchQuery.CONTENTDOM_AUDIO) media = document.getAudiolinks();
         else if (mediatype == plasmaSearchQuery.CONTENTDOM_VIDEO) media = document.getVideolinks();
         else if (mediatype == plasmaSearchQuery.CONTENTDOM_APP) media = document.getApplinks();
         if (media == null) return null;
 
-        Iterator i = media.entrySet().iterator();
-        Map.Entry entry;
+        Iterator<Map.Entry<String, String>> i = media.entrySet().iterator();
+        Map.Entry<String, String> entry;
         String url, desc;
-        Set s;
-        ArrayList result = new ArrayList();
+        Set<String> s;
+        ArrayList<MediaSnippet> result = new ArrayList<MediaSnippet>();
         while (i.hasNext()) {
-            entry = (Map.Entry) i.next();
-            url = (String) entry.getKey();
-            desc = (String) entry.getValue();
+            entry = i.next();
+            url = entry.getKey();
+            desc = entry.getValue();
             s = removeAppearanceHashes(url, queryhashes);
             if (s.size() == 0) {
                 result.add(new MediaSnippet(mediatype, url, desc, null));
@@ -672,17 +672,17 @@ public class plasmaSnippetCache {
         return result;
     }
 
-    public static ArrayList computeImageSnippets(plasmaParserDocument document, Set queryhashes) {
+    public static ArrayList<MediaSnippet> computeImageSnippets(plasmaParserDocument document, Set<String> queryhashes) {
 
-        TreeSet images = document.getImages();
+        TreeSet<htmlFilterImageEntry> images = document.getImages();
 
-        Iterator i = images.iterator();
+        Iterator<htmlFilterImageEntry> i = images.iterator();
         htmlFilterImageEntry ientry;
         String url, desc;
-        Set s;
-        ArrayList result = new ArrayList();
+        Set<String> s;
+        ArrayList<MediaSnippet> result = new ArrayList<MediaSnippet>();
         while (i.hasNext()) {
-            ientry = (htmlFilterImageEntry) i.next();
+            ientry = i.next();
             url = ientry.url().toNormalform(true, true);
             desc = ientry.alt();
             s = removeAppearanceHashes(url, queryhashes);
@@ -699,17 +699,17 @@ public class plasmaSnippetCache {
         return result;
     }
 
-    private static Set removeAppearanceHashes(String sentence, Set queryhashes) {
+    private static Set<String> removeAppearanceHashes(String sentence, Set<String> queryhashes) {
         // remove all hashes that appear in the sentence
         if (sentence == null) return queryhashes;
-        HashMap hs = hashSentence(sentence);
-        Iterator j = queryhashes.iterator();
+        HashMap<String, Integer> hs = hashSentence(sentence);
+        Iterator<String> j = queryhashes.iterator();
         String hash;
         Integer pos;
-        Set remaininghashes = new HashSet();
+        Set<String> remaininghashes = new HashSet<String>();
         while (j.hasNext()) {
-            hash = (String) j.next();
-            pos = (Integer) hs.get(hash);
+            hash = j.next();
+            pos = hs.get(hash);
             if (pos == null) {
                 remaininghashes.add(new String(hash));
             }
@@ -717,15 +717,15 @@ public class plasmaSnippetCache {
         return remaininghashes;
     }
 
-    private static HashMap hashSentence(String sentence) {
+    private static HashMap<String, Integer> hashSentence(String sentence) {
         // generates a word-wordPos mapping
-        HashMap map = new HashMap();
-        Enumeration words = plasmaCondenser.wordTokenizer(sentence, "UTF-8", 0);
+        HashMap<String, Integer> map = new HashMap<String, Integer>();
+        Enumeration<StringBuffer> words = plasmaCondenser.wordTokenizer(sentence, "UTF-8", 0);
         int pos = 0;
         StringBuffer word;
         String hash;
         while (words.hasMoreElements()) {
-            word = (StringBuffer) words.nextElement();
+            word = words.nextElement();
             hash = plasmaCondenser.word2hash(new String(word));
             if (!map.containsKey(hash)) map.put(hash, new Integer(pos)); // dont overwrite old values, that leads to too far word distances
             pos += word.length() + 1;
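To illustrate the typed snippet helpers above (illustrative only: removeAppearanceHashes is private to plasmaSnippetCache, and the words are made up; word2hash is the existing hash function in plasmaCondenser):

    // hedged sketch: suppose the query consists of the hashes of "peer" and "search"
    Set<String> queryhashes = new HashSet<String>();
    queryhashes.add(plasmaCondenser.word2hash("peer"));
    queryhashes.add(plasmaCondenser.word2hash("search"));
    Set<String> remaining = removeAppearanceHashes("a peer to peer search engine", queryhashes);
    // remaining is empty here: every query hash occurs in the sentence,
    // so the sentence fully covers the query for snippet purposes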
diff --git a/source/de/anomic/plasma/plasmaSwitchboard.java b/source/de/anomic/plasma/plasmaSwitchboard.java
index b3d7d50cc..39969f439 100644
--- a/source/de/anomic/plasma/plasmaSwitchboard.java
+++ b/source/de/anomic/plasma/plasmaSwitchboard.java
@@ -238,7 +238,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
     public double lastrequestedQueries = 0d;
     public int totalPPM = 0;
     public double totalQPM = 0d;
-    public TreeMap clusterhashes; // map of peerhash(String)/alternative-local-address as ip:port or only ip (String) or null if address in seed should be used
+    public TreeMap<String, String> clusterhashes; // map of peerhash(String)/alternative-local-address as ip:port or only ip (String) or null if address in seed should be used
     public boolean acceptLocalURLs, acceptGlobalURLs;
     public URLLicense licensedURLs;
     public Timer moreMemory;