more generics

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4295 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 17 years ago
parent 9d8b17188a
commit df2a7a8ac8

@ -103,7 +103,7 @@ public class Supporter {
accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.INCOMING_DB);
//accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.OUTGOING_DB);
//accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.PUBLISHED_DB);
kelondroMScoreCluster ranking = new kelondroMScoreCluster(); // score cluster for url hashes
kelondroMScoreCluster<String> ranking = new kelondroMScoreCluster<String>(); // score cluster for url hashes
kelondroRow rowdef = new kelondroRow("String url-255, String title-120, String description-120, String refid-" + (serverDate.PATTERN_SHORT_SECOND.length() + 12), kelondroNaturalOrder.naturalOrder, 0);
HashMap Supporter = new HashMap(); // a mapping from an url hash to a kelondroRow.Entry with display properties
accumulateSupporter(Supporter, ranking, rowdef, negativeHashes, positiveHashes, yacyNewsPool.INCOMING_DB);

@ -111,7 +111,7 @@ public class Surftips {
accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.INCOMING_DB);
//accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.OUTGOING_DB);
//accumulateVotes(negativeHashes, positiveHashes, yacyNewsPool.PUBLISHED_DB);
kelondroMScoreCluster ranking = new kelondroMScoreCluster(); // score cluster for url hashes
kelondroMScoreCluster<String> ranking = new kelondroMScoreCluster<String>(); // score cluster for url hashes
kelondroRow rowdef = new kelondroRow("String url-255, String title-120, String description-120, String refid-" + (serverDate.PATTERN_SHORT_SECOND.length() + 12), kelondroNaturalOrder.naturalOrder, 0);
HashMap surftips = new HashMap(); // a mapping from an url hash to a kelondroRow.Entry with display properties
accumulateSurftips(surftips, ranking, rowdef, negativeHashes, positiveHashes, yacyNewsPool.INCOMING_DB);

@ -50,7 +50,7 @@ public interface htmlFilterTransformer {
public void init(String initarg);
// ask if this transformer will do any transformation whatsoever
// this may return true if the initialization resultet in a status
// this may return true if the initialization resulted in a status
// that does not allow any transformation
public boolean isIdentityTransformer();

@ -75,7 +75,7 @@ import de.anomic.server.serverDate;
import de.anomic.yacy.yacyURL;
public final class httpHeader extends TreeMap implements Map {
public final class httpHeader extends TreeMap<String, Object> implements Map<String, Object> {
private static final long serialVersionUID = 17L;
@ -172,8 +172,8 @@ public final class httpHeader extends TreeMap implements Map {
/* =============================================================
* defining default http status messages
* ============================================================= */
public static final HashMap http0_9 = new HashMap();
public static final HashMap http1_0 = new HashMap();
public static final HashMap<String, String> http0_9 = new HashMap<String, String>();
public static final HashMap<String, String> http1_0 = new HashMap<String, String>();
static {
http1_0.putAll(http0_9);
http1_0.put("200","OK");
@ -193,7 +193,7 @@ public final class httpHeader extends TreeMap implements Map {
http1_0.put("502","Bad Gateway");
http1_0.put("503","Service Unavailable");
}
public static final HashMap http1_1 = new HashMap();
public static final HashMap<String, String> http1_1 = new HashMap<String, String>();
static {
http1_1.putAll(http1_0);
http1_1.put("100","Continue");
@ -251,63 +251,62 @@ public final class httpHeader extends TreeMap implements Map {
public static final String CONNECTION_PROP_PROXY_RESPOND_HEADER = "PROXY_RESPOND_HEADER";
public static final String CONNECTION_PROP_PROXY_RESPOND_SIZE = "PROXY_REQUEST_SIZE";
private final HashMap reverseMappingCache;
private final HashMap<String, String> reverseMappingCache;
private static final Collator insensitiveCollator = Collator.getInstance(Locale.US);
static {
insensitiveCollator.setStrength(Collator.SECONDARY);
insensitiveCollator.setDecomposition(Collator.NO_DECOMPOSITION);
insensitiveCollator.setStrength(Collator.SECONDARY);
insensitiveCollator.setDecomposition(Collator.NO_DECOMPOSITION);
}
public httpHeader() {
this(null);
this(null);
}
public httpHeader(HashMap reverseMappingCache) {
// this creates a new TreeMap with a case insensitive mapping
// to provide a put-method that translates given keys into their
// 'proper' appearance, a translation cache is needed.
// upon instantiation, such a mapping cache can be handed over
// If the reverseMappingCache is null, none is used
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
public httpHeader(HashMap<String, String> reverseMappingCache) {
// this creates a new TreeMap with a case insensitive mapping
// to provide a put-method that translates given keys into their
// 'proper' appearance, a translation cache is needed.
// upon instantiation, such a mapping cache can be handed over
// If the reverseMappingCache is null, none is used
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
}
public httpHeader(HashMap reverseMappingCache, File f) throws IOException {
// creates also a case insensitive map and loads it initially
// with some values
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
public httpHeader(HashMap<String, String> reverseMappingCache, File f) throws IOException {
// creates also a case insensitive map and loads it initially
// with some values
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
// load with data
BufferedReader br = new BufferedReader(new FileReader(f));
String line;
int pos;
while ((line = br.readLine()) != null) {
pos = line.indexOf("=");
if (pos >= 0) put(line.substring(0, pos), line.substring(pos + 1));
}
br.close();
// load with data
BufferedReader br = new BufferedReader(new FileReader(f));
String line;
int pos;
while ((line = br.readLine()) != null) {
pos = line.indexOf("=");
if (pos >= 0) put(line.substring(0, pos), line.substring(pos + 1));
}
br.close();
}
public httpHeader(HashMap reverseMappingCache, Map othermap) {
// creates a case insensitive map from another map
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
public httpHeader(HashMap<String, String> reverseMappingCache, Map<String, String> othermap) {
// creates a case insensitive map from another map
super((Collator) insensitiveCollator.clone());
this.reverseMappingCache = reverseMappingCache;
// load with data
if (othermap != null) this.putAll(othermap);
// load with data
if (othermap != null) this.putAll(othermap);
}
// we override the put method to make use of the reverseMappingCache
public Object put(Object key, Object value) {
String k = (String) key;
String upperK = k.toUpperCase();
public Object put(String key, Object value) {
String upperK = key.toUpperCase();
if (reverseMappingCache == null) {
return super.put(k, value);
return super.put(key, value);
}
if (reverseMappingCache.containsKey(upperK)) {
@ -316,13 +315,13 @@ public final class httpHeader extends TreeMap implements Map {
}
// we put in without a cached key and store the key afterwards
Object r = super.put(k, value);
reverseMappingCache.put(upperK, k);
Object r = super.put(key, value);
reverseMappingCache.put(upperK, key);
return r;
}
// to make the occurrence of multiple keys possible, we add them using a counter
public Object add(Object key, Object value) {
public Object add(String key, Object value) {
int c = keyCount((String) key);
if (c == 0) return put(key, value);
return put("*" + key + "-" + c, value);
@ -360,10 +359,10 @@ public final class httpHeader extends TreeMap implements Map {
FileOutputStream fos = null;
try {
fos = new FileOutputStream(f);
Iterator i = keySet().iterator();
Iterator<String> i = keySet().iterator();
String key, value;
while (i.hasNext()) {
key = (String) i.next();
key = i.next();
value = (String) get(key);
fos.write((key + "=" + value + "\r\n").getBytes());
}
@ -734,12 +733,12 @@ public final class httpHeader extends TreeMap implements Map {
.append(httpStatusText).append("\r\n");
// write header
Iterator i = keySet().iterator();
Iterator<String> i = keySet().iterator();
String key;
char tag;
int count;
while (i.hasNext()) {
key = (String) i.next();
key = i.next();
tag = key.charAt(0);
if ((tag != '*') && (tag != '#')) { // '#' in key is reserved for proxy attributes as artificial header values
count = keyCount(key);
@ -797,7 +796,7 @@ public final class httpHeader extends TreeMap implements Map {
* Holds header properties
*/
//Since properties such as cookies can be multiple, we cannot use HashMap here. We have to use Vector.
private Vector cookies=new Vector();
private Vector<Entry> cookies = new Vector<Entry>();
/**
* Implementation of Map.Entry. Structure that hold two values - exactly what we need!
@ -912,10 +911,10 @@ public final class httpHeader extends TreeMap implements Map {
}
return "";
}
public Vector getCookieVector(){
public Vector<Entry> getCookieVector(){
return cookies;
}
public void setCookieVector(Vector mycookies){
public void setCookieVector(Vector<Entry> mycookies){
cookies=mycookies;
}
/**
@ -933,7 +932,7 @@ public final class httpHeader extends TreeMap implements Map {
* }</pre>
* @return iterator to read all request properties.
*/
public Iterator getCookies()
public Iterator<Entry> getCookies()
{
return cookies.iterator();
}

@ -116,7 +116,7 @@ public final class httpd implements serverHandler {
public static final String copyright = "[ HTTP SERVER: AnomicHTTPD v" + vDATE + " by Michael Christen / www.anomic.de ]";
public static final String hline = "-------------------------------------------------------------------------------";
public static HashMap reverseMappingCache = new HashMap();
public static HashMap<String, String> reverseMappingCache = new HashMap<String, String>();
private static plasmaSwitchboard switchboard = null;
private static String virtualHost = null;

@ -54,8 +54,8 @@ public final class indexRAMRI implements indexRI {
// class variables
private final File databaseRoot;
protected final SortedMap cache; // wordhash-container
private final kelondroMScoreCluster hashScore;
private final kelondroMScoreCluster hashDate;
private final kelondroMScoreCluster<String> hashScore;
private final kelondroMScoreCluster<String> hashDate;
private long initTime;
private int cacheMaxCount;
public int cacheReferenceCountLimit;
@ -71,8 +71,8 @@ public final class indexRAMRI implements indexRI {
// the cache has a back-end where indexes that do not fit in the cache are flushed
this.databaseRoot = databaseRoot;
this.cache = Collections.synchronizedSortedMap(new TreeMap());
this.hashScore = new kelondroMScoreCluster();
this.hashDate = new kelondroMScoreCluster();
this.hashScore = new kelondroMScoreCluster<String>();
this.hashDate = new kelondroMScoreCluster<String>();
this.initTime = System.currentTimeMillis();
this.cacheMaxCount = 10000;
this.cacheReferenceCountLimit = wCacheReferenceCountLimitInit;

@ -42,7 +42,7 @@ import de.anomic.yacy.yacyURL;
public class indexRWIEntryOrder extends kelondroAbstractOrder implements kelondroOrder {
private indexRWIVarEntry min, max;
private plasmaSearchRankingProfile ranking;
private kelondroMScoreCluster doms;
private kelondroMScoreCluster<String> doms;
private int maxdomcount;
private static final int processors = Runtime.getRuntime().availableProcessors(); // for multiprocessor support, used during normalization
@ -51,7 +51,7 @@ public class indexRWIEntryOrder extends kelondroAbstractOrder implements kelondr
this.min = null;
this.max = null;
this.ranking = profile;
this.doms = new kelondroMScoreCluster();
this.doms = new kelondroMScoreCluster<String>();
this.maxdomcount = 0;
}

@ -49,7 +49,7 @@ import java.util.Iterator;
public class kelondroCachedRA extends kelondroAbstractRA implements kelondroRA {
protected kelondroRA ra;
protected kelondroMScoreCluster cacheScore;
protected kelondroMScoreCluster<Integer> cacheScore;
protected HashMap<Integer, byte[]> cacheMemory;
private int cacheMaxElements;
private int cacheElementSize;
@ -59,7 +59,7 @@ public class kelondroCachedRA extends kelondroAbstractRA implements kelondroRA {
this.ra = ra;
this.name = ra.name();
this.cacheMemory = new HashMap<Integer, byte[]>();
this.cacheScore = new kelondroMScoreCluster();
this.cacheScore = new kelondroMScoreCluster<Integer>();
this.cacheElementSize = elementsize;
this.cacheMaxElements = cachesize / cacheElementSize;
this.seekpos = 0;
@ -90,7 +90,7 @@ public class kelondroCachedRA extends kelondroAbstractRA implements kelondroRA {
if (cache == null) {
if (cacheMemory.size() >= cacheMaxElements) {
// delete elements in buffer if buffer too big
Iterator<Object> it = cacheScore.scores(true);
Iterator<Integer> it = cacheScore.scores(true);
Integer element = (Integer) it.next();
writeCache((byte[]) cacheMemory.get(element), element.intValue());
cacheMemory.remove(element);
@ -182,9 +182,9 @@ public class kelondroCachedRA extends kelondroAbstractRA implements kelondroRA {
public void close() throws IOException {
// write all unwritten buffers
if (cacheMemory == null) return;
Iterator<Object> it = cacheScore.scores(true);
Iterator<Integer> it = cacheScore.scores(true);
while (it.hasNext()) {
Integer element = (Integer) it.next();
Integer element = it.next();
writeCache((byte[]) cacheMemory.get(element), element.intValue());
cacheMemory.remove(element);
}

@ -48,16 +48,16 @@ import java.util.Random;
import java.util.SortedMap;
import java.util.TreeMap;
public final class kelondroMScoreCluster {
public final class kelondroMScoreCluster<E> {
protected final TreeMap<Object, Long> refkeyDB; // a mapping from a reference to the cluster key
protected final TreeMap<Long, Object> keyrefDB; // a mapping from the cluster key to the reference
protected final TreeMap<E, Long> refkeyDB; // a mapping from a reference to the cluster key
protected final TreeMap<Long, E> keyrefDB; // a mapping from the cluster key to the reference
private long gcount;
private int encnt;
public kelondroMScoreCluster() {
refkeyDB = new TreeMap<Object, Long>();
keyrefDB = new TreeMap<Long, Object>();
refkeyDB = new TreeMap<E, Long>();
keyrefDB = new TreeMap<Long, E>();
gcount = 0;
encnt = 0;
}
@ -146,23 +146,23 @@ public final class kelondroMScoreCluster {
return refkeyDB.size();
}
public synchronized void incScore(Object[] objs) {
addScore(objs, 1);
public synchronized void incScore(E[] objs) {
for (int i = 0; i < objs.length; i++) addScore(objs[i], 1);
}
public synchronized void decScore(Object[] objs) {
addScore(objs, -1);
public synchronized void decScore(E[] objs) {
for (int i = 0; i < objs.length; i++) addScore(objs[i], -1);
}
public synchronized void incScore(Object obj) {
public synchronized void incScore(E obj) {
addScore(obj, 1);
}
public synchronized void decScore(Object obj) {
public synchronized void decScore(E obj) {
addScore(obj, -1);
}
public synchronized void setScore(Object obj, int newScore) {
public synchronized void setScore(E obj, int newScore) {
if (obj == null) return;
//System.out.println("setScore " + obj.getClass().getName());
Long usk = refkeyDB.remove(obj); // get unique score key, old entry is not needed any more
@ -196,7 +196,7 @@ public final class kelondroMScoreCluster {
gcount += newScore;
}
public synchronized void addScore(Object obj, int incrementScore) {
public synchronized void addScore(E obj, int incrementScore) {
if (obj == null) return;
//System.out.println("setScore " + obj.getClass().getName());
Long usk = refkeyDB.remove(obj); // get unique score key, old entry is not needed any more
@ -232,7 +232,7 @@ public final class kelondroMScoreCluster {
gcount += incrementScore;
}
public synchronized int deleteScore(Object obj) {
public synchronized int deleteScore(E obj) {
// deletes entry and returns previous score
if (obj == null) return 0;
//System.out.println("setScore " + obj.getClass().getName());
@ -251,11 +251,11 @@ public final class kelondroMScoreCluster {
return oldScore;
}
public synchronized boolean existsScore(Object obj) {
public synchronized boolean existsScore(E obj) {
return (refkeyDB.get(obj) != null);
}
public synchronized int getScore(Object obj) {
public synchronized int getScore(E obj) {
if (obj == null) return 0;
Long cs = refkeyDB.get(obj);
if (cs == null) return 0;
@ -284,19 +284,20 @@ public final class kelondroMScoreCluster {
return keyrefDB.get(keyrefDB.firstKey());
}
public synchronized Object[] getScores(int maxCount, boolean up) {
public synchronized E[] getScores(int maxCount, boolean up) {
return getScores(maxCount, up, Integer.MIN_VALUE, Integer.MAX_VALUE);
}
public synchronized Object[] getScores(int maxCount, boolean up, int minScore, int maxScore) {
@SuppressWarnings("unchecked")
public synchronized E[] getScores(int maxCount, boolean up, int minScore, int maxScore) {
if (maxCount > refkeyDB.size()) maxCount = refkeyDB.size();
Object[] s = new Object[maxCount];
Iterator<Object> it = scores(up, minScore, maxScore);
E[] s = (E[]) new Object[maxCount];
Iterator<E> it = scores(up, minScore, maxScore);
int i = 0;
while ((i < maxCount) && (it.hasNext())) s[i++] = it.next();
if (i < maxCount) {
// re-copy the result array
Object[] sc = new Object[i];
E[] sc = (E[]) new Object[i];
System.arraycopy(s, 0, sc, 0, i);
s = sc;
sc = null;
@ -308,20 +309,20 @@ public final class kelondroMScoreCluster {
return refkeyDB + " / " + keyrefDB;
}
public synchronized Iterator<Object> scores(boolean up) {
public synchronized Iterator<E> scores(boolean up) {
if (up) return new simpleScoreIterator();
return new reverseScoreIterator();
}
public synchronized Iterator<Object> scores(boolean up, int minScore, int maxScore) {
public synchronized Iterator<E> scores(boolean up, int minScore, int maxScore) {
return new komplexScoreIterator(up, minScore, maxScore);
}
private class komplexScoreIterator implements Iterator<Object> {
private class komplexScoreIterator implements Iterator<E> {
boolean up;
TreeMap<Long, Object> keyrefDBcopy;
Object n;
TreeMap<Long, E> keyrefDBcopy;
E n;
int min, max;
@SuppressWarnings("unchecked")
@ -329,7 +330,7 @@ public final class kelondroMScoreCluster {
this.up = up;
this.min = minScore;
this.max = maxScore;
this.keyrefDBcopy = (TreeMap<Long, Object>) keyrefDB.clone(); // NoSuchElementException here?
this.keyrefDBcopy = (TreeMap<Long, E>) keyrefDB.clone(); // NoSuchElementException here?
internalNext();
}
@ -342,11 +343,11 @@ public final class kelondroMScoreCluster {
int score = (max + min) / 2;
while (keyrefDBcopy.size() > 0) {
key = (Long) ((up) ? keyrefDBcopy.firstKey() : keyrefDBcopy.lastKey());
n = keyrefDBcopy.remove(key);
n = (E) keyrefDBcopy.remove(key);
score = (int) ((key.longValue() & 0xFFFFFFFF00000000L) >> 32);
if ((score >= min) && (score <= max)) return;
if (((up) && (score > max)) || ((!(up)) && (score < min))) {
keyrefDBcopy = new TreeMap<Long, Object>();
keyrefDBcopy = new TreeMap<Long, E>();
n = null;
return;
}
@ -354,8 +355,8 @@ public final class kelondroMScoreCluster {
n = null;
}
public Object next() {
Object o = n;
public E next() {
E o = n;
internalNext();
return o;
}
@ -366,9 +367,9 @@ public final class kelondroMScoreCluster {
}
private class reverseScoreIterator implements Iterator<Object> {
private class reverseScoreIterator implements Iterator<E> {
SortedMap<Long, Object> view;
SortedMap<Long, E> view;
Long key;
public reverseScoreIterator() {
@ -379,10 +380,10 @@ public final class kelondroMScoreCluster {
return view.size() > 0;
}
public Object next() {
public E next() {
key = view.lastKey();
view = view.headMap(key);
Object value = keyrefDB.get(key);
E value = keyrefDB.get(key);
//System.out.println("cluster reverse iterator: score = " + ((((Long) key).longValue() & 0xFFFFFFFF00000000L) >> 32) + ", handle = " + (((Long) key).longValue() & 0xFFFFFFFFL) + ", value = " + value);
return value;
}
@ -394,10 +395,10 @@ public final class kelondroMScoreCluster {
}
private class simpleScoreIterator implements Iterator<Object> {
private class simpleScoreIterator implements Iterator<E> {
Iterator<Map.Entry<Long, Object>> ii;
Map.Entry<Long, Object> entry;
Iterator<Map.Entry<Long, E>> ii;
Map.Entry<Long, E> entry;
public simpleScoreIterator() {
ii = keyrefDB.entrySet().iterator();
@ -407,7 +408,7 @@ public final class kelondroMScoreCluster {
return ii.hasNext();
}
public Object next() {
public E next() {
entry = ii.next();
//System.out.println("cluster simple iterator: score = " + ((((Long) entry.getKey()).longValue() & 0xFFFFFFFF00000000L) >> 32) + ", handle = " + (((Long) entry.getKey()).longValue() & 0xFFFFFFFFL) + ", value = " + entry.getValue());
return entry.getValue();
@ -430,7 +431,7 @@ public final class kelondroMScoreCluster {
}
System.out.println("Test for Score: start");
kelondroMScoreCluster s = new kelondroMScoreCluster();
kelondroMScoreCluster<String> s = new kelondroMScoreCluster<String>();
long c = 0;
// create cluster

@ -81,25 +81,25 @@ public class kelondroMSetTools {
// We distinguish two principal solutions
// - constructive join (generate new data structure)
// - destructive join (remove non-valid elements from given data structure)
// The alogorithm to perform the join can be also of two kind:
// - join by pairvise enumeration
// The algorithm to perform the join can be also of two kind:
// - join by pairwise enumeration
// - join by iterative tests (where we distinguish left-right and right-left tests)
public static TreeMap joinConstructive(Collection maps, boolean concatStrings) {
public static <A, B> TreeMap<A, B> joinConstructive(Collection<TreeMap<A, B>> maps, boolean concatStrings) {
// this joins all TreeMap(s) contained in maps
// first order entities by their size
TreeMap orderMap = new TreeMap();
TreeMap singleMap;
Iterator i = maps.iterator();
TreeMap<Long, TreeMap<A, B>> orderMap = new TreeMap<Long, TreeMap<A, B>>();
TreeMap<A, B> singleMap;
Iterator<TreeMap<A, B>> i = maps.iterator();
int count = 0;
while (i.hasNext()) {
// get next entity:
singleMap = (TreeMap) i.next();
singleMap = i.next();
// check result
if ((singleMap == null) || (singleMap.size() == 0)) return new TreeMap();
if ((singleMap == null) || (singleMap.size() == 0)) return new TreeMap<A, B>();
// store result in order of result size
orderMap.put(new Long(singleMap.size() * 1000 + count), singleMap);
@ -107,16 +107,16 @@ public class kelondroMSetTools {
}
// check if there is any result
if (orderMap.size() == 0) return new TreeMap();
if (orderMap.size() == 0) return new TreeMap<A, B>();
// we now must pairwise build up a conjunction of these maps
Long k = (Long) orderMap.firstKey(); // the smallest, which means, the one with the least entries
TreeMap mapA, mapB, joinResult = (TreeMap) orderMap.remove(k);
TreeMap<A, B> mapA, mapB, joinResult = (TreeMap<A, B>) orderMap.remove(k);
while ((orderMap.size() > 0) && (joinResult.size() > 0)) {
// take the first element of map which is a result and combine it with result
k = (Long) orderMap.firstKey(); // the next smallest...
mapA = joinResult;
mapB = (TreeMap) orderMap.remove(k);
mapB = (TreeMap<A, B>) orderMap.remove(k);
joinResult = joinConstructiveByTest(mapA, mapB, concatStrings); // TODO: better with enumeration?
// free resources
mapA = null;
@ -124,15 +124,15 @@ public class kelondroMSetTools {
}
// in 'searchResult' is now the combined search result
if (joinResult.size() == 0) return new TreeMap();
if (joinResult.size() == 0) return new TreeMap<A, B>();
return joinResult;
}
public static TreeMap joinConstructive(TreeMap map1, TreeMap map2, boolean concatStrings) {
public static <A, B> TreeMap<A, B> joinConstructive(TreeMap<A, B> map1, TreeMap<A, B> map2, boolean concatStrings) {
// comparators must be equal
if ((map1 == null) || (map2 == null)) return null;
if (map1.comparator() != map2.comparator()) return null;
if ((map1.size() == 0) || (map2.size() == 0)) return new TreeMap(map1.comparator());
if ((map1.size() == 0) || (map2.size() == 0)) return new TreeMap<A, B>(map1.comparator());
// decide which method to use
int high = ((map1.size() > map2.size()) ? map1.size() : map2.size());
@ -148,39 +148,49 @@ public class kelondroMSetTools {
return joinConstructiveByEnumeration(map1, map2, concatStrings);
}
private static TreeMap joinConstructiveByTest(TreeMap small, TreeMap large, boolean concatStrings) {
Iterator mi = small.entrySet().iterator();
TreeMap result = new TreeMap(large.comparator());
Map.Entry mentry1;
private static <A, B> TreeMap<A, B> joinConstructiveByTest(TreeMap<A, B> small, TreeMap<A, B> large, boolean concatStrings) {
Iterator<Map.Entry<A, B>> mi = small.entrySet().iterator();
TreeMap<A, B> result = new TreeMap<A, B>(large.comparator());
Map.Entry<A, B> mentry1;
Object mobj2;
while (mi.hasNext()) {
mentry1 = (Map.Entry) mi.next();
mentry1 = mi.next();
mobj2 = large.get(mentry1.getKey());
if (mobj2 != null) result.put(mentry1.getKey(), (concatStrings) ? ((String) mentry1.getValue() + (String) mobj2) : mentry1.getValue());
if (mobj2 != null) {
if (mentry1.getValue() instanceof String) {
result.put(mentry1.getKey(), (B) ((concatStrings) ? ((String) mentry1.getValue() + (String) mobj2) : (String) mentry1.getValue()));
} else {
result.put(mentry1.getKey(), mentry1.getValue());
}
}
}
return result;
}
private static TreeMap joinConstructiveByEnumeration(TreeMap map1, TreeMap map2, boolean concatStrings) {
// implement pairvise enumeration
Comparator comp = map1.comparator();
Iterator mi1 = map1.entrySet().iterator();
Iterator mi2 = map2.entrySet().iterator();
TreeMap result = new TreeMap(map1.comparator());
private static <A, B> TreeMap<A, B> joinConstructiveByEnumeration(TreeMap<A, B> map1, TreeMap<A, B> map2, boolean concatStrings) {
// implement pairwise enumeration
Comparator<? super A> comp = map1.comparator();
Iterator<Map.Entry<A, B>> mi1 = map1.entrySet().iterator();
Iterator<Map.Entry<A, B>> mi2 = map2.entrySet().iterator();
TreeMap<A, B> result = new TreeMap<A, B>(map1.comparator());
int c;
if ((mi1.hasNext()) && (mi2.hasNext())) {
Map.Entry mentry1 = (Map.Entry) mi1.next();
Map.Entry mentry2 = (Map.Entry) mi2.next();
Map.Entry<A, B> mentry1 = mi1.next();
Map.Entry<A, B> mentry2 = mi2.next();
while (true) {
c = compare(mentry1.getKey(), mentry2.getKey(), comp);
if (c < 0) {
if (mi1.hasNext()) mentry1 = (Map.Entry) mi1.next(); else break;
if (mi1.hasNext()) mentry1 = mi1.next(); else break;
} else if (c > 0) {
if (mi2.hasNext()) mentry2 = (Map.Entry) mi2.next(); else break;
if (mi2.hasNext()) mentry2 = mi2.next(); else break;
} else {
result.put(mentry1.getKey(), (concatStrings) ? ((String) mentry1.getValue() + (String) mentry2.getValue()) : mentry1.getValue());
if (mi1.hasNext()) mentry1 = (Map.Entry) mi1.next(); else break;
if (mi2.hasNext()) mentry2 = (Map.Entry) mi2.next(); else break;
if (mentry1.getValue() instanceof String) {
result.put(mentry1.getKey(), (B) ((concatStrings) ? ((String) mentry1.getValue() + (String) mentry2.getValue()) : (String) mentry1.getValue()));
} else {
result.put(mentry1.getKey(), mentry1.getValue());
}
if (mi1.hasNext()) mentry1 = mi1.next(); else break;
if (mi2.hasNext()) mentry2 = mi2.next(); else break;
}
}
}

@ -37,8 +37,8 @@ import java.util.Map;
public class kelondroMapObjects extends kelondroObjects {
private String[] sortfields, longaccfields, doubleaccfields;
private HashMap sortClusterMap; // a String-kelondroMScoreCluster - relation
private HashMap accMap; // to store accumulations of specific fields
private HashMap<String, kelondroMScoreCluster<String>> sortClusterMap; // a String-kelondroMScoreCluster - relation
private HashMap<String, Object> accMap; // to store accumulations of specific fields
private int elementCount;
public kelondroMapObjects(kelondroDyn dyn, int cachesize) {
@ -53,12 +53,12 @@ public class kelondroMapObjects extends kelondroObjects {
this.longaccfields = longaccfields;
this.doubleaccfields = doubleaccfields;
kelondroMScoreCluster[] cluster = null;
kelondroMScoreCluster<String>[] cluster = null;
if (sortfields == null) sortClusterMap = null; else {
sortClusterMap = new HashMap();
sortClusterMap = new HashMap<String, kelondroMScoreCluster<String>>();
cluster = new kelondroMScoreCluster[sortfields.length];
for (int i = 0; i < sortfields.length; i++) {
cluster[i] = new kelondroMScoreCluster();
cluster[i] = new kelondroMScoreCluster<String>();
}
}
@ -67,7 +67,7 @@ public class kelondroMapObjects extends kelondroObjects {
if ((longaccfields == null) && (doubleaccfields == null)) {
accMap = null;
} else {
accMap = new HashMap();
accMap = new HashMap<String, Object>();
if (longaccfields != null) {
longaccumulator = new Long[longaccfields.length];
for (int i = 0; i < longaccfields.length; i++) {
@ -84,15 +84,15 @@ public class kelondroMapObjects extends kelondroObjects {
// fill cluster and accumulator with values
if ((sortfields != null) || (longaccfields != null) || (doubleaccfields != null)) try {
kelondroCloneableIterator it = dyn.dynKeys(true, false);
kelondroCloneableIterator<String> it = dyn.dynKeys(true, false);
String mapname;
Object cell;
long valuel;
double valued;
Map map;
Map<String, String> map;
this.elementCount = 0;
while (it.hasNext()) {
mapname = (String) it.next();
mapname = it.next();
map = getMap(mapname);
if (map == null) break;
@ -147,16 +147,16 @@ public class kelondroMapObjects extends kelondroObjects {
public void reset() throws IOException {
super.reset();
if (sortfields == null) sortClusterMap = null; else {
sortClusterMap = new HashMap();
sortClusterMap = new HashMap<String, kelondroMScoreCluster<String>>();
for (int i = 0; i < sortfields.length; i++) {
sortClusterMap.put(sortfields[i], new kelondroMScoreCluster());
sortClusterMap.put(sortfields[i], new kelondroMScoreCluster<String>());
}
}
if ((longaccfields == null) && (doubleaccfields == null)) {
accMap = null;
} else {
accMap = new HashMap();
accMap = new HashMap<String, Object>();
if (longaccfields != null) {
for (int i = 0; i < longaccfields.length; i++) {
accMap.put(longaccfields[i], new Long(0));
@ -171,14 +171,14 @@ public class kelondroMapObjects extends kelondroObjects {
this.elementCount = 0;
}
public synchronized void set(String key, Map newMap) throws IOException {
public synchronized void set(String key, Map<String, String> newMap) throws IOException {
assert (key != null);
assert (key.length() > 0);
assert (newMap != null);
// update elementCount
if ((longaccfields != null) || (doubleaccfields != null)) {
final Map oldMap = getMap(key, false);
final Map<String, String> oldMap = getMap(key, false);
if (oldMap == null) {
// new element
elementCount++;
@ -197,14 +197,14 @@ public class kelondroMapObjects extends kelondroObjects {
if ((longaccfields != null) || (doubleaccfields != null)) updateAcc(newMap, true);
}
private void updateAcc(Map map, boolean add) {
private void updateAcc(Map<String, String> map, boolean add) {
String value;
long valuel;
double valued;
Long longaccumulator;
Double doubleaccumulator;
if (longaccfields != null) for (int i = 0; i < longaccfields.length; i++) {
value = (String) map.get(longaccfields[i]);
value = map.get(longaccfields[i]);
if (value != null) {
try {
valuel = Long.parseLong(value);
@ -218,7 +218,7 @@ public class kelondroMapObjects extends kelondroObjects {
}
}
if (doubleaccfields != null) for (int i = 0; i < doubleaccfields.length; i++) {
value = (String) map.get(doubleaccfields[i]);
value = map.get(doubleaccfields[i]);
if (value != null) {
try {
valued = Double.parseDouble(value);
@ -233,13 +233,13 @@ public class kelondroMapObjects extends kelondroObjects {
}
}
private void updateSortCluster(final String key, final Map map) {
private void updateSortCluster(final String key, final Map<String, String> map) {
Object cell;
kelondroMScoreCluster cluster;
kelondroMScoreCluster<String> cluster;
for (int i = 0; i < sortfields.length; i++) {
cell = map.get(sortfields[i]);
if (cell != null) {
cluster = (kelondroMScoreCluster) sortClusterMap.get(sortfields[i]);
cluster = (kelondroMScoreCluster<String>) sortClusterMap.get(sortfields[i]);
cluster.setScore(key, kelondroMScoreCluster.object2score(cell));
sortClusterMap.put(sortfields[i], cluster);
}
@ -251,7 +251,7 @@ public class kelondroMapObjects extends kelondroObjects {
// update elementCount
if ((sortfields != null) || (longaccfields != null) || (doubleaccfields != null)) {
final Map map = getMap(key);
final Map<String, String> map = getMap(key);
if (map != null) {
// update count
elementCount--;
@ -266,7 +266,7 @@ public class kelondroMapObjects extends kelondroObjects {
super.remove(key);
}
public Map getMap(String key) {
public Map<String, String> getMap(String key) {
try {
kelondroObjectsMapEntry mapEntry = (kelondroObjectsMapEntry) super.get(key);
if (mapEntry == null) return null;
@ -277,7 +277,7 @@ public class kelondroMapObjects extends kelondroObjects {
}
}
protected Map getMap(String key, boolean cache) {
protected Map<String, String> getMap(String key, boolean cache) {
try {
kelondroObjectsMapEntry mapEntry = (kelondroObjectsMapEntry) super.get(key, cache);
if (mapEntry == null) return null;
@ -290,18 +290,18 @@ public class kelondroMapObjects extends kelondroObjects {
private void deleteSortCluster(final String key) {
if (key == null) return;
kelondroMScoreCluster cluster;
kelondroMScoreCluster<String> cluster;
for (int i = 0; i < sortfields.length; i++) {
cluster = (kelondroMScoreCluster) sortClusterMap.get(sortfields[i]);
cluster = sortClusterMap.get(sortfields[i]);
cluster.deleteScore(key);
sortClusterMap.put(sortfields[i], cluster);
}
}
public synchronized Iterator keys(final boolean up, /* sorted by */ String field) {
public synchronized Iterator<String> keys(final boolean up, /* sorted by */ String field) {
// sorted iteration using the sortClusters
if (sortClusterMap == null) return null;
final kelondroMScoreCluster cluster = (kelondroMScoreCluster) sortClusterMap.get(field);
final kelondroMScoreCluster<String> cluster = sortClusterMap.get(field);
if (cluster == null) return null; // sort field does not exist
//System.out.println("DEBUG: cluster for field " + field + ": " + cluster.toString());
return cluster.scores(up);
@ -346,14 +346,14 @@ public class kelondroMapObjects extends kelondroObjects {
super.close();
}
public class mapIterator implements Iterator {
public class mapIterator implements Iterator<Map<String, String>> {
// enumerates Map-Type elements
// the key is also included in every map that is returned; it's key is 'key'
Iterator keyIterator;
Iterator<String> keyIterator;
boolean finish;
public mapIterator(Iterator keyIterator) {
public mapIterator(Iterator<String> keyIterator) {
this.keyIterator = keyIterator;
this.finish = false;
}
@ -362,11 +362,11 @@ public class kelondroMapObjects extends kelondroObjects {
return (!(finish)) && (keyIterator != null) && (keyIterator.hasNext());
}
public Object next() {
public Map<String, String> next() {
String nextKey;
Map map;
Map<String, String> map;
while (keyIterator.hasNext()) {
nextKey = (String) keyIterator.next();
nextKey = keyIterator.next();
if (nextKey == null) {
finish = true;
return null;

@ -34,7 +34,7 @@ import java.util.Iterator;
public class kelondroObjects {
private kelondroDyn dyn;
private kelondroMScoreCluster cacheScore;
private kelondroMScoreCluster<String> cacheScore;
private HashMap<String, kelondroObjectsEntry> cache;
private long startup;
private int cachesize;
@ -43,7 +43,7 @@ public class kelondroObjects {
public kelondroObjects(kelondroDyn dyn, int cachesize) {
this.dyn = dyn;
this.cache = new HashMap<String, kelondroObjectsEntry>();
this.cacheScore = new kelondroMScoreCluster();
this.cacheScore = new kelondroMScoreCluster<String>();
this.startup = System.currentTimeMillis();
this.cachesize = cachesize;
}
@ -51,7 +51,7 @@ public class kelondroObjects {
public void reset() throws IOException {
this.dyn.reset();
this.cache = new HashMap<String, kelondroObjectsEntry>();
this.cacheScore = new kelondroMScoreCluster();
this.cacheScore = new kelondroMScoreCluster<String>();
}
public int keySize() {

@ -33,13 +33,13 @@ import java.util.Map;
public class kelondroObjectsMapEntry implements kelondroObjectsEntry {
protected Map entry;
protected Map<String, String> entry;
public kelondroObjectsMapEntry() {
this.entry = new HashMap();
this.entry = new HashMap<String, String>();
}
public kelondroObjectsMapEntry(Map map) {
public kelondroObjectsMapEntry(Map<String, String> map) {
this.entry = map;
}
@ -63,7 +63,7 @@ public class kelondroObjectsMapEntry implements kelondroObjectsEntry {
}
}
public Map map() {
public Map<String, String> map() {
return this.entry;
}

@ -167,8 +167,8 @@ public class kelondroSQLTable implements kelondroIndex {
}
}
public synchronized void putMultiple(List rows) throws IOException {
Iterator i = rows.iterator();
public synchronized void putMultiple(List<kelondroRow.Entry> rows) throws IOException {
Iterator<kelondroRow.Entry> i = rows.iterator();
while (i.hasNext()) put((Entry) i.next());
}
@ -212,7 +212,7 @@ public class kelondroSQLTable implements kelondroIndex {
throw new UnsupportedOperationException();
}
public synchronized void addUniqueMultiple(List rows) throws IOException {
public synchronized void addUniqueMultiple(List<kelondroRow.Entry> rows) throws IOException {
throw new UnsupportedOperationException();
}
@ -242,12 +242,12 @@ public class kelondroSQLTable implements kelondroIndex {
return null;
}
public kelondroCloneableIterator rows(boolean up, byte[] startKey) throws IOException {
public kelondroCloneableIterator<kelondroRow.Entry> rows(boolean up, byte[] startKey) throws IOException {
// Objects are of type kelondroRow.Entry
return null;
}
public kelondroCloneableIterator keys(boolean up, byte[] startKey) {
public kelondroCloneableIterator<byte[]> keys(boolean up, byte[] startKey) {
// Objects are of type byte[]
return null;
}

@ -58,7 +58,7 @@ public final class kelondroStack extends kelondroEcoRecords {
// define the Over-Head-Array
private static short thisOHBytes = 0; // our record definition does not need extra bytes
private static short thisOHHandles = 2; // and two handles overhead for a double-chained list
private static short thisFHandles = 2; // two file handles for root handle and handle to last lement
private static short thisFHandles = 2; // two file handles for root handle and handle to last element
// define pointers for OH array access
protected static final int left = 0; // pointer for OHHandle-array: handle()-Value of left child Node
@ -106,13 +106,13 @@ public final class kelondroStack extends kelondroEcoRecords {
return open(f, row);
}
public Iterator stackIterator(boolean up) {
public Iterator<kelondroRow.Entry> stackIterator(boolean up) {
// iterates the elements in an ordered way.
// returns kelondroRow.Entry - type Objects
return new stackIterator(up);
}
public class stackIterator implements Iterator {
public class stackIterator implements Iterator<kelondroRow.Entry> {
kelondroHandle nextHandle = null;
kelondroHandle lastHandle = null;
boolean up;
@ -126,7 +126,7 @@ public final class kelondroStack extends kelondroEcoRecords {
return (nextHandle != null);
}
public Object next() {
public kelondroRow.Entry next() {
lastHandle = nextHandle;
try {
nextHandle = new EcoNode(nextHandle).getOHHandle((up) ? right : left);
@ -306,10 +306,10 @@ public final class kelondroStack extends kelondroEcoRecords {
public void print() throws IOException {
super.print();
Iterator it = stackIterator(true);
Iterator<kelondroRow.Entry> it = stackIterator(true);
kelondroRow.Entry r;
while (it.hasNext()) {
r = (kelondroRow.Entry) it.next();
r = it.next();
System.out.print(" KEY:'" + r.getColString(0, null) + "'");
for (int j = 1; j < row().columns(); j++)
System.out.print(", V[" + j + "]:'" + r.getColString(j, null) + "'");

@ -56,7 +56,7 @@ import java.util.Hashtable;
public class kelondroXMLTables {
private Hashtable tables;
private Hashtable<String, Hashtable<String, String>> tables;
// tables is a hashtable that contains hashtables as values in the table
private File propFile;
@ -65,18 +65,19 @@ public class kelondroXMLTables {
public kelondroXMLTables() {
this.propFile = null;
this.timestamp = System.currentTimeMillis();
this.tables = new Hashtable();
this.tables = new Hashtable<String, Hashtable<String, String>>();
}
@SuppressWarnings("unchecked")
public kelondroXMLTables(File file) throws IOException {
this.propFile = file;
this.timestamp = System.currentTimeMillis();
if (propFile.exists()) {
XMLDecoder xmldec = new XMLDecoder(new FileInputStream(propFile));
tables = (Hashtable) xmldec.readObject();
tables = (Hashtable<String, Hashtable<String, String>>) xmldec.readObject();
xmldec.close();
} else {
tables = new Hashtable();
tables = new Hashtable<String, Hashtable<String, String>>();
}
}
@ -120,29 +121,25 @@ public class kelondroXMLTables {
public int sizeTable(String table) {
// returns number of entries in table; if table does not exist -1
Hashtable l = (Hashtable) tables.get(table);
if (l == null)
return -1;
Hashtable<String, String> l = tables.get(table);
if (l == null) return -1;
return l.size();
}
public void createTable(String table) throws IOException {
// creates a new table
Hashtable l = (Hashtable) tables.get(table);
Hashtable<String, String> l = tables.get(table);
if (l != null)
return; // we do not overwite
tables.put(table, new Hashtable());
if (this.propFile != null)
commit(false);
tables.put(table, new Hashtable<String, String>());
if (this.propFile != null) commit(false);
}
public void set(String table, String key, String value) throws IOException {
if (table != null) {
Hashtable l = (Hashtable) tables.get(table);
if (l == null)
throw new RuntimeException("Microtables.set: table does not exist");
if (value == null)
value = "";
Hashtable<String, String> l = tables.get(table);
if (l == null) throw new RuntimeException("Microtables.set: table does not exist");
if (value == null) value = "";
l.put(key, value);
}
if (this.propFile != null)
@ -151,7 +148,7 @@ public class kelondroXMLTables {
public String get(String table, String key, String deflt) {
if (table != null) {
Hashtable l = (Hashtable) tables.get(table);
Hashtable<String, String> l = tables.get(table);
if (l == null)
throw new RuntimeException("Microtables.get: table does not exist");
if (l.containsKey(key))
@ -164,7 +161,7 @@ public class kelondroXMLTables {
public boolean has(String table, String key) {
if (table != null) {
Hashtable l = (Hashtable) tables.get(table);
Hashtable<String, String> l = tables.get(table);
if (l == null)
throw new RuntimeException("Microtables.has: table does not exist");
return (l.containsKey(key));
@ -172,9 +169,9 @@ public class kelondroXMLTables {
return false;
}
public Enumeration keys(String table) {
public Enumeration<String> keys(String table) {
if (table != null) {
Hashtable l = (Hashtable) tables.get(table);
Hashtable<String, String> l = tables.get(table);
if (l == null)
throw new RuntimeException("Microtables.keys: table does not exist");
return l.keys();

@ -107,8 +107,8 @@ public final class plasmaCondenser {
private final static int numlength = 5;
//private Properties analysis;
private TreeMap words; // a string (the words) to (wordStatProp) - relation
private HashMap sentences;
private TreeMap<String, wordStatProp> words; // a string (the words) to (wordStatProp) - relation
private HashMap<StringBuffer, phraseStatProp> sentences;
private int wordminsize;
private int wordcut;
@ -124,8 +124,8 @@ public final class plasmaCondenser {
// added media words are flagged with the appropriate media flag
this.wordminsize = 3;
this.wordcut = 2;
this.words = new TreeMap();
this.sentences = new HashMap();
this.words = new TreeMap<String, wordStatProp>();
this.sentences = new HashMap<StringBuffer, phraseStatProp>();
this.RESULT_FLAGS = new kelondroBitfield(4);
//System.out.println("DEBUG: condensing " + document.getMainLongTitle() + ", indexText=" + Boolean.toString(indexText) + ", indexMedia=" + Boolean.toString(indexMedia));
@ -205,14 +205,15 @@ public final class plasmaCondenser {
}
// finally check all words for missing flag entry
i = words.entrySet().iterator();
Iterator<Map.Entry<String, wordStatProp>> j = words.entrySet().iterator();
wordStatProp wprop;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
wprop = (wordStatProp) entry.getValue();
Map.Entry<String, wordStatProp> we;
while (j.hasNext()) {
we = j.next();
wprop = (wordStatProp) we.getValue();
if (wprop.flags == null) {
wprop.flags = (kelondroBitfield) RESULT_FLAGS.clone();
words.put(entry.getKey(), wprop);
words.put(we.getKey(), wprop);
}
}
}
@ -255,8 +256,8 @@ public final class plasmaCondenser {
this.wordminsize = wordminsize;
this.wordcut = wordcut;
// analysis = new Properties();
words = new TreeMap();
sentences = new HashMap();
words = new TreeMap<String, wordStatProp>();
sentences = new HashMap<StringBuffer, phraseStatProp>();
createCondensement(text, charset);
}
@ -265,8 +266,8 @@ public final class plasmaCondenser {
return kelondroBase64Order.enhancedCoder.encode(serverCodings.encodeMD5Raw(word.toLowerCase())).substring(0, yacySeedDB.commonHashLength);
}
public static final Set words2hashSet(String[] words) {
TreeSet hashes = new TreeSet(kelondroBase64Order.enhancedCoder);
public static final Set<String> words2hashSet(String[] words) {
TreeSet<String> hashes = new TreeSet<String>(kelondroBase64Order.enhancedCoder);
for (int i = 0; i < words.length; i++) hashes.add(word2hash(words[i]));
return hashes;
}
@ -277,14 +278,14 @@ public final class plasmaCondenser {
return new String(sb);
}
public static final TreeSet words2hashes(Set words) {
Iterator i = words.iterator();
TreeSet hashes = new TreeSet(kelondroBase64Order.enhancedCoder);
while (i.hasNext()) hashes.add(word2hash((String) i.next()));
public static final TreeSet<String> words2hashes(Set<String> words) {
Iterator<String> i = words.iterator();
TreeSet<String> hashes = new TreeSet<String>(kelondroBase64Order.enhancedCoder);
while (i.hasNext()) hashes.add(word2hash(i.next()));
return hashes;
}
public int excludeWords(TreeSet stopwords) {
public int excludeWords(TreeSet<String> stopwords) {
// subtracts the given stopwords from the word list
// the word list shrinkes. This returns the number of shrinked words
int oldsize = words.size();
@ -292,12 +293,12 @@ public final class plasmaCondenser {
return oldsize - words.size();
}
public Map words() {
public Map<String, wordStatProp> words() {
// returns the words as word/wordStatProp relation map
return words;
}
public Map sentences() {
public Map<StringBuffer, phraseStatProp> sentences() {
return sentences;
}

@ -514,7 +514,7 @@ public final class plasmaHTCache {
public static void cacheScan() {
log.logConfig("STARTING HTCACHE SCANNING");
kelondroMScoreCluster doms = new kelondroMScoreCluster();
kelondroMScoreCluster<String> doms = new kelondroMScoreCluster<String>();
int fileCount = 0;
enumerateFiles fileEnum = new enumerateFiles(cachePath, true, false, true, true);
File dbfile = new File(cachePath, "responseHeader.db");

@ -63,7 +63,7 @@ public final class plasmaSearchRankingProcess {
private int globalcount;
private indexRWIEntryOrder order;
private HashMap<String, Object> urlhashes; // map for double-check; String/Long relation, addresses ranking number (backreference for deletion)
private kelondroMScoreCluster ref; // reference score computation for the commonSense heuristic
private kelondroMScoreCluster<String> ref; // reference score computation for the commonSense heuristic
private int[] flagcount; // flag counter
private TreeSet<String> misses; // contains url-hashes that could not been found in the LURL-DB
private plasmaWordIndex wordIndex;
@ -84,7 +84,7 @@ public final class plasmaSearchRankingProcess {
this.maxentries = maxentries;
this.globalcount = 0;
this.urlhashes = new HashMap<String, Object>();
this.ref = new kelondroMScoreCluster();
this.ref = new kelondroMScoreCluster<String>();
this.misses = new TreeSet<String>();
this.wordIndex = wordIndex;
this.sortorder = sortorder;
@ -254,7 +254,7 @@ public final class plasmaSearchRankingProcess {
return false;
}
public synchronized Map[] searchContainerMaps() {
public synchronized Map<String, indexContainer>[] searchContainerMaps() {
// direct access to the result maps is needed for abstract generation
// this is only available if execQuery() was called before
return localSearchContainerMaps;
@ -269,7 +269,7 @@ public final class plasmaSearchRankingProcess {
private synchronized Object[] /*{Object, indexRWIEntry}*/ bestRWI(boolean skipDoubleDom) {
// returns from the current RWI list the best entry and removed this entry from the list
Object bestEntry;
TreeMap m;
TreeMap<Object, indexRWIEntry> m;
indexRWIEntry rwi;
while (sortedRWIEntries.size() > 0) {
bestEntry = sortedRWIEntries.firstKey();
@ -277,10 +277,10 @@ public final class plasmaSearchRankingProcess {
if (!skipDoubleDom) return new Object[]{bestEntry, rwi};
// check doubledom
String domhash = rwi.urlHash().substring(6);
m = (TreeMap) this.doubleDomCache.get(domhash);
m = (TreeMap<Object, indexRWIEntry>) this.doubleDomCache.get(domhash);
if (m == null) {
// first appearance of dom
m = new TreeMap();
m = new TreeMap<Object, indexRWIEntry>();
this.doubleDomCache.put(domhash, m);
return new Object[]{bestEntry, rwi};
}
@ -289,12 +289,12 @@ public final class plasmaSearchRankingProcess {
}
// no more entries in sorted RWI entries. Now take Elements from the doubleDomCache
// find best entry from all caches
Iterator i = this.doubleDomCache.values().iterator();
Iterator<TreeMap<Object, indexRWIEntry>> i = this.doubleDomCache.values().iterator();
bestEntry = null;
Object o;
indexRWIEntry bestrwi = null;
while (i.hasNext()) {
m = (TreeMap) i.next();
m = i.next();
if (m.size() == 0) continue;
if (bestEntry == null) {
bestEntry = m.firstKey();
@ -318,7 +318,7 @@ public final class plasmaSearchRankingProcess {
}
if (bestrwi == null) return null;
// finally remove the best entry from the doubledom cache
m = (TreeMap) this.doubleDomCache.get(bestrwi.urlHash().substring(6));
m = this.doubleDomCache.get(bestrwi.urlHash().substring(6));
m.remove(bestEntry);
return new Object[]{bestEntry, bestrwi};
}
@ -344,8 +344,8 @@ public final class plasmaSearchRankingProcess {
public synchronized int size() {
//assert sortedRWIEntries.size() == urlhashes.size() : "sortedRWIEntries.size() = " + sortedRWIEntries.size() + ", urlhashes.size() = " + urlhashes.size();
int c = sortedRWIEntries.size();
Iterator i = this.doubleDomCache.values().iterator();
while (i.hasNext()) c += ((TreeMap) i.next()).size();
Iterator<TreeMap<Object, indexRWIEntry>> i = this.doubleDomCache.values().iterator();
while (i.hasNext()) c += i.next().size();
return c;
}
@ -370,15 +370,15 @@ public final class plasmaSearchRankingProcess {
return iEntry;
}
public Iterator miss() {
public Iterator<String> miss() {
return this.misses.iterator();
}
public Set getReferences(int count) {
public Set<String> getReferences(int count) {
// create a list of words that had been computed by statistics over all
// words that appeared in the url or the description of all urls
Object[] refs = ref.getScores(count, false, 2, Integer.MAX_VALUE);
TreeSet s = new TreeSet(String.CASE_INSENSITIVE_ORDER);
TreeSet<String> s = new TreeSet<String>(String.CASE_INSENSITIVE_ORDER);
for (int i = 0; i < refs.length; i++) {
s.add((String) refs[i]);
}

@ -84,7 +84,7 @@ public class plasmaSnippetCache {
public static final int ERROR_NO_MATCH = 16;
private static int snippetsScoreCounter;
private static kelondroMScoreCluster snippetsScore;
private static kelondroMScoreCluster<String> snippetsScore;
private static HashMap snippetsCache;
/**
@ -104,7 +104,7 @@ public class plasmaSnippetCache {
parser = parserx;
log = logx;
snippetsScoreCounter = 0;
snippetsScore = new kelondroMScoreCluster();
snippetsScore = new kelondroMScoreCluster<String>();
snippetsCache = new HashMap();
faviconCache = new HashMap();
}
@ -450,7 +450,7 @@ public class plasmaSnippetCache {
// care for counter
if (snippetsScoreCounter == java.lang.Integer.MAX_VALUE) {
snippetsScoreCounter = 0;
snippetsScore = new kelondroMScoreCluster();
snippetsScore = new kelondroMScoreCluster<String>();
snippetsCache = new HashMap();
}

@ -42,16 +42,16 @@ import de.anomic.plasma.plasmaSwitchboard;
public class serverDomains {
// a dns cache
private static final Map nameCacheHit = Collections.synchronizedMap(new HashMap()); // a not-synchronized map resulted in deadlocks
private static final Set nameCacheMiss = Collections.synchronizedSet(new HashSet());
private static final kelondroMScoreCluster nameCacheHitAges = new kelondroMScoreCluster();
private static final kelondroMScoreCluster nameCacheMissAges = new kelondroMScoreCluster();
private static final Map<String, InetAddress> nameCacheHit = Collections.synchronizedMap(new HashMap<String, InetAddress>()); // a not-synchronized map resulted in deadlocks
private static final Set<String> nameCacheMiss = Collections.synchronizedSet(new HashSet<String>());
private static final kelondroMScoreCluster<String> nameCacheHitAges = new kelondroMScoreCluster<String>();
private static final kelondroMScoreCluster<String> nameCacheMissAges = new kelondroMScoreCluster<String>();
private static final int maxNameCacheHitAge = 24 * 60 * 60; // 24 hours in minutes
private static final int maxNameCacheMissAge = 24 * 60 * 60; // 24 hours in minutes
private static final int maxNameCacheHitSize = 3000;
private static final int maxNameCacheMissSize = 3000;
public static final List nameCacheNoCachingPatterns = Collections.synchronizedList(new LinkedList());
private static final Set nameCacheNoCachingList = Collections.synchronizedSet(new HashSet());
public static final List<String> nameCacheNoCachingPatterns = Collections.synchronizedList(new LinkedList<String>());
private static final Set<String> nameCacheNoCachingList = Collections.synchronizedSet(new HashSet<String>());
private static final long startTime = System.currentTimeMillis();
/**
@ -101,9 +101,10 @@ public class serverDomains {
) {
doCaching = false;
} else {
Iterator noCachingPatternIter = nameCacheNoCachingPatterns.iterator();
Iterator<String> noCachingPatternIter = nameCacheNoCachingPatterns.iterator();
String nextPattern;
while (noCachingPatternIter.hasNext()) {
String nextPattern = (String) noCachingPatternIter.next();
nextPattern = noCachingPatternIter.next();
if (ip.getHostName().matches(nextPattern)) {
// disallow dns caching for this host
nameCacheNoCachingList.add(ip.getHostName());

@ -59,11 +59,11 @@ import de.anomic.server.logging.serverLog;
public class yacyDHTAction implements yacyPeerAction {
protected yacySeedDB seedDB;
protected kelondroMScoreCluster seedCrawlReady;
protected kelondroMScoreCluster<String> seedCrawlReady;
public yacyDHTAction(yacySeedDB seedDB) {
this.seedDB = seedDB;
this.seedCrawlReady = new kelondroMScoreCluster();
this.seedCrawlReady = new kelondroMScoreCluster<String>();
// init crawl-ready table
try {
Iterator en = seedDB.seedsConnected(true, false, null, (float) 0.0);

@ -169,7 +169,7 @@ public class yacySearch extends Thread {
}
// put in seeds according to dht
final kelondroMScoreCluster ranking = new kelondroMScoreCluster();
final kelondroMScoreCluster<String> ranking = new kelondroMScoreCluster<String>();
final HashMap seeds = new HashMap();
yacySeed seed;
Iterator dhtEnum;

@ -346,7 +346,7 @@ public final class yacySeedDB {
if (count > sizeConnected()) count = sizeConnected();
// fill a score object
kelondroMScoreCluster seedScore = new kelondroMScoreCluster();
kelondroMScoreCluster<String> seedScore = new kelondroMScoreCluster<String>();
yacySeed ys;
long absage;
Iterator s = seedsConnected(true, false, null, (float) 0.0);

@ -561,7 +561,7 @@ public final class yacy {
enumerateFiles ef = new enumerateFiles(new File(dbRoot, "WORDS"), true, false, true, true);
File f;
String h;
kelondroMScoreCluster hs = new kelondroMScoreCluster();
kelondroMScoreCluster<String> hs = new kelondroMScoreCluster<String>();
while (ef.hasMoreElements()) {
f = (File) ef.nextElement();
h = f.getName().substring(0, yacySeedDB.commonHashLength);

Loading…
Cancel
Save