more generics

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4343 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 17 years ago
parent cbefc651ac
commit f4e9ff6ce9

@ -46,4 +46,4 @@ Thanks to:
Wolfgang Sander-Beurmann for resources for Demo-Peer, and many public relations.
All of our users for helping the YaCy-Network grow.
For more information see http://www.yacy.net/yacy/Volunteers.html
For more information see http://yacy.net

@ -65,17 +65,17 @@ public class CookieMonitorOutgoing_p {
int entCount = 0;
int tmpCount = 0;
boolean dark = true;
Iterator i = switchboard.outgoingCookies.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, Object[]>> i = switchboard.outgoingCookies.entrySet().iterator();
Map.Entry<String, Object[]> entry;
String host, client;
Object[] cookies;
Date date;
Object[] oa;
while ((entCount < maxCount) && (i.hasNext())) {
// get out values
entry = (Map.Entry) i.next();
host = (String) entry.getKey();
oa = (Object[]) entry.getValue();
entry = i.next();
host = entry.getKey();
oa = entry.getValue();
date = (Date) oa[0];
client = (String) oa[1];
cookies = (Object[]) oa[2];
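
The hunk above shows the pattern this commit applies throughout: raw Iterator and Map.Entry become Iterator<Map.Entry<String, Object[]>> and Map.Entry<String, Object[]>, which removes the (Map.Entry), (String) and (Object[]) casts. A minimal, self-contained sketch of what the typed map buys (class name and sample data are invented; the map layout mirrors switchboard.outgoingCookies):

import java.util.Date;
import java.util.HashMap;
import java.util.Map;

// Minimal sketch: the same host -> {date, client, cookies} layout as
// switchboard.outgoingCookies, iterated without casts.
public class CookieMapSketch {
    public static void main(String[] args) {
        Map<String, Object[]> outgoingCookies = new HashMap<String, Object[]>();
        outgoingCookies.put("example.net", new Object[] {
                new Date(), "127.0.0.1", new Object[] { "id=42" } });

        // With Map<String, Object[]>, getKey()/getValue() are typed, so the
        // (Map.Entry), (String) and (Object[]) casts of the old code vanish.
        for (Map.Entry<String, Object[]> entry : outgoingCookies.entrySet()) {
            String host = entry.getKey();
            Object[] oa = entry.getValue();
            Date date = (Date) oa[0];       // element casts remain: Object[] is untyped
            String client = (String) oa[1];
            System.out.println(host + " " + date + " " + client);
        }
    }
}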

@ -272,7 +272,7 @@ public final class Settings_p {
HashMap configList = plasmaParser.getParserConfigList();
plasmaParserConfig[] configArray = (plasmaParserConfig[]) configList.values().toArray(new plasmaParserConfig[configList.size()]);
HashSet parserInfos = new HashSet(sb.parser.getAvailableParserList().values());
HashSet<ParserInfo> parserInfos = new HashSet<ParserInfo>(sb.parser.getAvailableParserList().values());
// // fetching a list of all available mimetypes
// List availableParserKeys = Arrays.asList(availableParsers.entrySet().toArray(new ParserInfo[availableParsers.size()]));
@ -295,9 +295,9 @@ public final class Settings_p {
prop.put("parser_" + parserIdx + "_colspan", configArray.length);
int mimeIdx = 0;
Enumeration mimeTypeIter = parserInfo.supportedMimeTypes.keys();
Enumeration<String> mimeTypeIter = parserInfo.supportedMimeTypes.keys();
while (mimeTypeIter.hasMoreElements()) {
String mimeType = (String)mimeTypeIter.nextElement();
String mimeType = mimeTypeIter.nextElement();
prop.put("parser_" + parserIdx + "_mime_" + mimeIdx + "_mimetype", mimeType);
//prop.put("parser_" + parserIdx + "_name", parserName);

@ -1,277 +0,0 @@
// indexCachedRI.java
// -----------------------------
// (C) 2006 by Michael Peter Christen; mc@anomic.de, Frankfurt a. M., Germany
// first published 7.11.2006 on http://www.anomic.de
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.index;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import de.anomic.kelondro.kelondroBase64Order;
import de.anomic.kelondro.kelondroCloneableIterator;
import de.anomic.kelondro.kelondroMergeIterator;
import de.anomic.kelondro.kelondroOrder;
import de.anomic.kelondro.kelondroRotateIterator;
import de.anomic.kelondro.kelondroRow;
import de.anomic.server.logging.serverLog;
public class indexCachedRI implements indexRI {
private kelondroRow payloadrow;
private kelondroOrder indexOrder;
private indexRAMRI riExtern, riIntern;
private indexCollectionRI backend;
public boolean busyCacheFlush; // shows if a cache flush is currently performed
private int flushsize;
public indexCachedRI(indexRAMRI riExtern, indexRAMRI riIntern, indexCollectionRI backend, kelondroOrder payloadorder, kelondroRow payloadrow, serverLog log) {
this.riExtern = riExtern;
this.riIntern = riIntern;
this.backend = backend;
this.indexOrder = payloadorder;
this.payloadrow = payloadrow;
this.busyCacheFlush = false;
this.flushsize = 2000;
}
public kelondroRow payloadrow() {
return payloadrow;
}
public int minMem() {
return 1024 * 1024;
}
public void setWordFlushSize(int flushsize) {
this.flushsize = flushsize;
}
public void flushControl() {
// check for forced flush
synchronized (this) {
if (riExtern.size() > riExtern.getMaxWordCount()) {
flushCache(riExtern, riExtern.size() + flushsize - riExtern.getMaxWordCount());
}
if (riIntern.size() > riIntern.getMaxWordCount()) {
flushCache(riIntern, riIntern.size() + flushsize - riIntern.getMaxWordCount());
}
}
}
public long getUpdateTime(String wordHash) {
indexContainer entries = getContainer(wordHash, null);
if (entries == null) return 0;
return entries.updated();
}
public void addEntries(indexContainer entries, long updateTime, boolean intern) {
// add the entry
if (intern) {
riIntern.addEntries(entries, updateTime, true);
} else {
riExtern.addEntries(entries, updateTime, false);
flushControl();
}
}
public void flushCacheSome() {
flushCacheSome(riExtern);
flushCacheSome(riIntern);
}
private void flushCacheSome(indexRAMRI ram) {
flushCache(ram, flushsize);
while (ram.maxURLinCache() > 2048) flushCache(ram, 1);
}
private void flushCache(indexRAMRI ram, int count) {
if (count <= 0) return;
if (count > 5000) count = 5000;
busyCacheFlush = true;
String wordHash;
ArrayList<indexContainer> containerList = new ArrayList<indexContainer>();
synchronized (this) {
for (int i = 0; i < count; i++) { // possible position of outOfMemoryError ?
if (ram.size() == 0) break;
// select one word to flush
wordHash = ram.bestFlushWordHash();
// move one container from ram to flush list
indexContainer c = ram.deleteContainer(wordHash);
if (c != null) containerList.add(c);
}
// flush the containers
backend.addMultipleEntries(containerList);
}
busyCacheFlush = false;
}
public boolean hasContainer(String wordHash) {
if (riExtern.hasContainer(wordHash)) return true;
if (riIntern.hasContainer(wordHash)) return true;
if (backend.hasContainer(wordHash)) return true;
return false;
}
public indexContainer getContainer(String wordHash, Set<String> urlselection) {
// get from cache
indexContainer container = riExtern.getContainer(wordHash, urlselection);
if (container == null) {
container = riIntern.getContainer(wordHash, urlselection);
} else {
container.addAllUnique(riIntern.getContainer(wordHash, urlselection));
}
// get from collection index
if (container == null) {
container = backend.getContainer(wordHash, urlselection);
} else {
container.addAllUnique(backend.getContainer(wordHash, urlselection));
}
return container;
}
public Map<String, indexContainer> getContainers(Set<String> wordHashes, Set<String> urlselection, boolean interruptIfEmpty) {
// return map of wordhash:indexContainer
// retrieve entities that belong to the hashes
HashMap<String, indexContainer> containers = new HashMap<String, indexContainer>();
String singleHash;
indexContainer singleContainer;
Iterator<String> i = wordHashes.iterator();
while (i.hasNext()) {
// get next word hash:
singleHash = i.next();
// retrieve index
singleContainer = getContainer(singleHash, urlselection);
// check result
if (((singleContainer == null) || (singleContainer.size() == 0)) && (interruptIfEmpty)) return new HashMap<String, indexContainer>();
containers.put(singleHash, singleContainer);
}
return containers;
}
public int size() {
return java.lang.Math.max(backend.size(), java.lang.Math.max(riIntern.size(), riExtern.size()));
}
public int indexSize(String wordHash) {
int size = backend.indexSize(wordHash);
size += riIntern.indexSize(wordHash);
size += riExtern.indexSize(wordHash);
return size;
}
public void close() {
synchronized (this) {
riIntern.close();
riExtern.close();
backend.close();
}
}
public indexContainer deleteContainer(String wordHash) {
indexContainer c = riIntern.deleteContainer(wordHash);
if (c == null) c = riExtern.deleteContainer(wordHash); else c.addAllUnique(riExtern.deleteContainer(wordHash));
if (c == null) c = backend.deleteContainer(wordHash); else c.addAllUnique(backend.deleteContainer(wordHash));
return c;
}
public boolean removeEntry(String wordHash, String urlHash) {
boolean removed = false;
removed = removed | (riIntern.removeEntry(wordHash, urlHash));
removed = removed | (riExtern.removeEntry(wordHash, urlHash));
removed = removed | (backend.removeEntry(wordHash, urlHash));
return removed;
}
public int removeEntries(String wordHash, Set<String> urlHashes) {
int removed = 0;
removed += riIntern.removeEntries(wordHash, urlHashes);
removed += riExtern.removeEntries(wordHash, urlHashes);
removed += backend.removeEntries(wordHash, urlHashes);
return removed;
}
public String removeEntriesExpl(String wordHash, Set<String> urlHashes) {
String removed = "";
removed += riIntern.removeEntries(wordHash, urlHashes) + ", ";
removed += riExtern.removeEntries(wordHash, urlHashes) + ", ";
removed += backend.removeEntries(wordHash, urlHashes) + ", ";
return removed;
}
public TreeSet<indexContainer> indexContainerSet(String startHash, boolean ramOnly, boolean rot, int count) {
// creates a set of indexContainers
// this does not use the dhtInCache
kelondroOrder containerOrder = new indexContainerOrder((kelondroOrder) indexOrder.clone());
containerOrder.rotate(startHash.getBytes());
TreeSet<indexContainer> containers = new TreeSet<indexContainer>(containerOrder);
Iterator<indexContainer> i = wordContainers(startHash, ramOnly, rot);
if (ramOnly) count = Math.min(riExtern.size(), count);
indexContainer container;
while ((count > 0) && (i.hasNext())) {
container = i.next();
if ((container != null) && (container.size() > 0)) {
containers.add(container);
count--;
}
}
return containers;
}
public kelondroCloneableIterator<indexContainer> wordContainers(String startHash, boolean rot) {
// returns an iteration of indexContainers
return wordContainers(startHash, false, rot);
}
public kelondroCloneableIterator<indexContainer> wordContainers(String startHash, boolean ramOnly, boolean rot) {
kelondroCloneableIterator<indexContainer> i;
if (ramOnly) {
i = riExtern.wordContainers(startHash, false);
} else {
i = new kelondroMergeIterator(
riExtern.wordContainers(startHash, false),
backend.wordContainers(startHash, false),
new indexContainerOrder(this.indexOrder),
indexContainer.containerMergeMethod,
true);
}
if (rot) {
return new kelondroRotateIterator<indexContainer>(i, new String(kelondroBase64Order.zero(startHash.length())));
} else {
return i;
}
}
}
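
The file deleted above layered two RAM caches over a collection backend: getContainer consulted riExtern, then riIntern, then the backend, merging non-null partial results via addAllUnique. A simplified model of that lookup order (String payloads and concatenation stand in for indexContainer and addAllUnique):

import java.util.HashMap;
import java.util.Map;

// Simplified model of indexCachedRI.getContainer: two RAM tiers, one backend.
public class TieredLookupSketch {
    private final Map<String, String> riExtern = new HashMap<String, String>();
    private final Map<String, String> riIntern = new HashMap<String, String>();
    private final Map<String, String> backend  = new HashMap<String, String>();

    public String getContainer(String wordHash) {
        String container = merge(riExtern.get(wordHash), riIntern.get(wordHash));
        return merge(container, backend.get(wordHash));
    }

    // stands in for indexContainer.addAllUnique: keep whichever side exists,
    // join both when both tiers returned a result
    private static String merge(String a, String b) {
        if (a == null) return b;
        if (b == null) return a;
        return a + b;
    }
}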

@ -54,7 +54,8 @@ public class kelondroCloneableMapIterator<E> implements kelondroCloneableIterato
this.last = null;
}
public kelondroCloneableMapIterator<E> clone(Object modifier) {
@SuppressWarnings("unchecked")
public kelondroCloneableMapIterator<E> clone(Object modifier) {
return new kelondroCloneableMapIterator(map, modifier);
}
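
The @SuppressWarnings("unchecked") annotations added in this and the following hunks share one cause: a raw constructor call or cast whose type parameter the compiler cannot verify. A tiny sketch of the clone() case, with invented names:

import java.util.Iterator;
import java.util.TreeMap;

// Sketch (invented names): a generic iterator whose clone() rebuilds itself
// through a raw constructor call. The compiler cannot prove that the raw
// 'new' yields a MapIter<E>, hence the warning the commit suppresses.
public class MapIter<E> implements Iterator<E> {
    private final TreeMap<E, ?> map;
    private final Iterator<E> it;

    public MapIter(TreeMap<E, ?> map) {
        this.map = map;
        this.it = map.keySet().iterator();
    }

    @SuppressWarnings("unchecked")
    public MapIter<E> clone(Object modifier) {
        return new MapIter(map); // raw type: unchecked, but safe by construction
    }

    public boolean hasNext() { return it.hasNext(); }
    public E next() { return it.next(); }
    public void remove() { it.remove(); }
}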

@ -142,6 +142,7 @@ public class kelondroMSetTools {
return joinConstructiveByEnumeration(map1, map2, concatStrings);
}
@SuppressWarnings("unchecked")
private static <A, B> TreeMap<A, B> joinConstructiveByTest(TreeMap<A, B> small, TreeMap<A, B> large, boolean concatStrings) {
Iterator<Map.Entry<A, B>> mi = small.entrySet().iterator();
TreeMap<A, B> result = new TreeMap<A, B>(large.comparator());
@ -161,6 +162,7 @@ public class kelondroMSetTools {
return result;
}
@SuppressWarnings("unchecked")
private static <A, B> TreeMap<A, B> joinConstructiveByEnumeration(TreeMap<A, B> map1, TreeMap<A, B> map2, boolean concatStrings) {
// implement pairwise enumeration
Comparator<? super A> comp = map1.comparator();

@ -99,7 +99,8 @@ public class kelondroMergeIterator<E> implements kelondroCloneableIterator<E> {
return (na != null) || (nb != null);
}
public E next() {
@SuppressWarnings("unchecked")
public E next() {
E s;
if (na == null) {
s = nb;
@ -146,6 +147,7 @@ public class kelondroMergeIterator<E> implements kelondroCloneableIterator<E> {
throw new java.lang.UnsupportedOperationException("merge does not support remove");
}
@SuppressWarnings("unchecked")
public static <A> kelondroCloneableIterator<A> cascade(Set<kelondroCloneableIterator<A>> /*of*/ iterators, kelondroOrder<A> c, Method merger, boolean up) {
// this extends the ability to combine two iterators
// to the ability of combining a set of iterators
@ -154,7 +156,8 @@ public class kelondroMergeIterator<E> implements kelondroCloneableIterator<E> {
return cascade((Set<kelondroCloneableIterator<A>>) iterators.iterator(), c, merger, up);
}
private static <A> kelondroCloneableIterator<A> cascade(Iterator<A> /*of*/ iiterators, kelondroOrder<A> c, Method merger, boolean up) {
@SuppressWarnings("unchecked")
private static <A> kelondroCloneableIterator<A> cascade(Iterator<A> /*of*/ iiterators, kelondroOrder<A> c, Method merger, boolean up) {
if (iiterators == null) return null;
if (!(iiterators.hasNext())) return null;
kelondroCloneableIterator<A> one = (kelondroCloneableIterator<A>) iiterators.next();

@ -220,13 +220,25 @@ public class kelondroSplitTable implements kelondroIndex {
if (suffix == null) return null;
kelondroIndex table = (kelondroIndex) tables.get(suffix);
if (table == null) {
// make new table
if (serverMemory.request(minimumRAM4Eco, true)) {
// enough memory for an ecoTable
table = new kelondroEcoTable(new File(path, tablename + "." + suffix), rowdef, false, EcoFSBufferSize);
// open table
File f = new File(path, tablename + "." + suffix);
if (f.exists()) {
if (f.isDirectory()) {
// open a flex table
table = new kelondroFlexTable(path, tablename + "." + suffix, -1, rowdef, 0, true);
} else {
// open an eco table
table = new kelondroEcoTable(f, rowdef, false, EcoFSBufferSize);
}
} else {
// use the flex table
table = new kelondroFlexTable(path, tablename + "." + suffix, -1, rowdef, 0, true);
// make new table
if (serverMemory.request(minimumRAM4Eco, true)) {
// enough memory for an ecoTable
table = new kelondroEcoTable(f, rowdef, false, EcoFSBufferSize);
} else {
// use the flex table
table = new kelondroFlexTable(path, tablename + "." + suffix, -1, rowdef, 0, true);
}
}
tables.put(suffix, table);
}
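
The kelondroSplitTable hunk above changes when the memory check runs: an existing table is now opened in whatever format is already on disk (a directory holds a flex table, a plain file an eco table), and only a brand-new table chooses its format by free memory. A condensed sketch of that dispatch (the String results stand in for the kelondro table constructors, enoughMemory for serverMemory.request):

import java.io.File;

// Condensed model of the new kelondroSplitTable logic: respect the on-disk
// format for existing tables, choose by memory only for new ones.
public class TableOpenSketch {
    static String openTable(File path, String tablename, String suffix, boolean enoughMemory) {
        File f = new File(path, tablename + "." + suffix);
        if (f.exists()) {
            // a directory holds a flex table, a single file holds an eco table
            return f.isDirectory() ? "flex:" + f.getName() : "eco:" + f.getName();
        }
        // new table: the eco format needs RAM, otherwise fall back to flex
        return enoughMemory ? "eco:" + f.getName() : "flex:" + f.getName();
    }
}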

@ -118,7 +118,8 @@ public class kelondroSplittedTree implements kelondroIndex {
return ktfs[partition(key)].get(key);
}
public synchronized void putMultiple(List<kelondroRow.Entry> rows) throws IOException {
@SuppressWarnings("unchecked")
public synchronized void putMultiple(List<kelondroRow.Entry> rows) throws IOException {
Iterator<kelondroRow.Entry> i = rows.iterator();
kelondroRow.Entry row;
ArrayList<kelondroRow.Entry>[] parts = new ArrayList[ktfs.length];

@ -68,6 +68,7 @@ public class kelondroXMLTables {
this.tables = new Hashtable<String, Hashtable<String, String>>();
}
@SuppressWarnings("unchecked")
public kelondroXMLTables(File file) throws IOException {
this.propFile = file;
this.timestamp = System.currentTimeMillis();

@ -61,7 +61,7 @@ public interface IResourceInfo {
/**
* @return the resource information
*/
public Map getMap();
public Map<String, String> getMap();
/**
* @return the URL of this content

@ -4,7 +4,7 @@ import java.util.Hashtable;
public class ParserInfo {
// general parser info
public Class parserClass;
public Class<?> parserClass;
public String parserClassName;
public String parserName;
@ -12,7 +12,7 @@ public class ParserInfo {
// parser properties
public String[] libxDependencies;
public Hashtable supportedMimeTypes;
public Hashtable<String, String> supportedMimeTypes;
// usage statistic
public int usageCount = 0;

@ -166,10 +166,10 @@ public class plasmaGrafics {
// draw in the search target
plasmaSearchQuery query = event.getQuery();
Iterator i = query.queryHashes.iterator();
Iterator<String> i = query.queryHashes.iterator();
eventPicture.setColor(ymageMatrix.GREY);
while (i.hasNext()) {
hash = (String) i.next();
hash = i.next();
angle = (int) (360 * yacySeed.dhtPosition(hash));
eventPicture.arcLine(cx, cy, cr - 20, cr, angle);
}
@ -211,7 +211,7 @@ public class plasmaGrafics {
// draw connected senior and principals
int count = 0;
int totalCount = 0;
Iterator e = yacyCore.seedDB.seedsConnected(true, false, null, (float) 0.0);
Iterator<yacySeed> e = yacyCore.seedDB.seedsConnected(true, false, null, (float) 0.0);
while (e.hasNext() && count < maxCount) {
seed = (yacySeed) e.next();

@ -99,13 +99,13 @@ public final class plasmaHTCache {
public static final long oneday = 1000 * 60 * 60 * 24; // milliseconds of a day
private static kelondroMapObjects responseHeaderDB = null;
private static final LinkedList cacheStack = new LinkedList();
private static final Map cacheAge = Collections.synchronizedMap(new TreeMap()); // a <date+hash, cache-path> - relation
private static final LinkedList<Entry> cacheStack = new LinkedList<Entry>();
private static final Map<String, File> cacheAge = Collections.synchronizedMap(new TreeMap<String, File>()); // a <date+hash, cache-path> - relation
public static long curCacheSize = 0;
public static long maxCacheSize;
public static File cachePath;
public static final serverLog log = new serverLog("HTCACHE");
public static final HashSet filesInUse = new HashSet(); // can we delete this file
public static final HashSet<File> filesInUse = new HashSet<File>(); // can we delete this file
public static String cacheLayout;
public static boolean cacheMigration;
@ -446,13 +446,13 @@ public final class plasmaHTCache {
private static void cleanupDoIt(long newCacheSize) {
File file;
synchronized (cacheAge) {
Iterator iter = cacheAge.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, File>> iter = cacheAge.entrySet().iterator();
Map.Entry<String, File> entry;
while (iter.hasNext() && curCacheSize >= newCacheSize) {
if (Thread.currentThread().isInterrupted()) return;
entry = (Map.Entry) iter.next();
String key = (String) entry.getKey();
file = (File) entry.getValue();
entry = iter.next();
String key = entry.getKey();
file = entry.getValue();
long t = Long.parseLong(key.substring(0, 16), 16);
if (System.currentTimeMillis() - t < 300000) break; // files must have been at least 5 minutes in the cache before they are deleted
if (file != null) {
@ -536,9 +536,9 @@ public final class plasmaHTCache {
//System.out.println("%" + (String) cacheAge.firstKey() + "=" + cacheAge.get(cacheAge.firstKey()));
long ageHours = 0;
if (!cacheAge.isEmpty()) {
Iterator i = cacheAge.keySet().iterator();
Iterator<String> i = cacheAge.keySet().iterator();
if (i.hasNext()) try {
ageHours = (System.currentTimeMillis() - Long.parseLong(((String) i.next()).substring(0, 16), 16)) / 3600000;
ageHours = (System.currentTimeMillis() - Long.parseLong(i.next().substring(0, 16), 16)) / 3600000;
} catch (NumberFormatException e) {
ageHours = 0;
} else {
@ -610,7 +610,7 @@ public final class plasmaHTCache {
public static IResourceInfo loadResourceInfo(yacyURL url) throws UnsupportedProtocolException, IllegalAccessException {
// loading data from database
Map hdb = responseHeaderDB.getMap(url.hash());
Map<String, String> hdb = responseHeaderDB.getMap(url.hash());
if (hdb == null) return null;
// generate the cached object
@ -841,8 +841,7 @@ public final class plasmaHTCache {
}
if (url != null) return url;
// try responseHeaderDB
Map hdb;
hdb = responseHeaderDB.getMap(urlHash);
Map<String, String> hdb = responseHeaderDB.getMap(urlHash);
if (hdb != null) {
Object origRequestLine = hdb.get(httpHeader.X_YACY_ORIGINAL_REQUEST_LINE);
if ((origRequestLine != null)&&(origRequestLine instanceof String)) {
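
The cleanup loop above depends on the cacheAge key convention: the first 16 characters of every key are a zero-padded hexadecimal timestamp, so the sorted map iterates oldest files first and the loop may stop at the first entry younger than five minutes. A small sketch of that format (the file path is invented):

import java.io.File;
import java.util.Map;
import java.util.TreeMap;

// Sketch of the cacheAge key convention: hex timestamp prefix plus URL hash,
// so a TreeMap<String, File> naturally iterates oldest entries first.
public class CacheAgeSketch {
    public static void main(String[] args) {
        Map<String, File> cacheAge = new TreeMap<String, File>();
        String key = String.format("%016x", System.currentTimeMillis()) + "someUrlHash0";
        cacheAge.put(key, new File("DATA/HTCACHE/example"));

        for (Map.Entry<String, File> entry : cacheAge.entrySet()) {
            long t = Long.parseLong(entry.getKey().substring(0, 16), 16);
            // entries younger than 5 minutes follow: stop, like the original loop
            if (System.currentTimeMillis() - t < 300000) break;
            System.out.println("would delete " + entry.getValue());
        }
    }
}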

@ -81,7 +81,7 @@ public final class plasmaParser {
* A list containing all installed parsers and the mimeType that they support
* @see #loadAvailableParserList()
*/
public static final Properties availableParserList = new Properties();
public static final HashMap<String, ParserInfo> availableParserList = new HashMap<String, ParserInfo>();
/**
* A list of file extensions and mime types that are supported by the html-parser
@ -170,7 +170,7 @@ public final class plasmaParser {
return this.theLogger;
}
public static HashMap getParserConfigList() {
public static HashMap<String, plasmaParserConfig> getParserConfigList() {
return parserConfigList;
}
@ -389,7 +389,7 @@ public final class plasmaParser {
return mimeTypeLookupByFileExt.getProperty(fileExt,"application/octet-stream");
}
public Hashtable getAvailableParserList() {
public HashMap<String, ParserInfo> getAvailableParserList() {
return plasmaParser.availableParserList;
}
@ -451,7 +451,7 @@ public final class plasmaParser {
}
// loading the list of mime-types that are supported by this parser class
Hashtable supportedMimeTypes = ((Parser)theParser).getSupportedMimeTypes();
Hashtable<String, String> supportedMimeTypes = ((Parser) theParser).getSupportedMimeTypes();
// creating a parser info object
ParserInfo parserInfo = new ParserInfo();
@ -460,12 +460,12 @@ public final class plasmaParser {
parserInfo.libxDependencies = neededLibx;
parserInfo.supportedMimeTypes = supportedMimeTypes;
parserInfo.parserVersionNr = ((Parser)theParser).getVersion();
parserInfo.parserName = ((Parser)theParser).getName();
parserInfo.parserName = ((Parser) theParser).getName();
Iterator mimeTypeIterator = supportedMimeTypes.keySet().iterator();
while (mimeTypeIterator.hasNext()) {
String mimeType = (String) mimeTypeIterator.next();
availableParserList.put(mimeType,parserInfo );
availableParserList.put(mimeType, parserInfo);
serverLog.logInfo("PARSER", "Found functional parser for mimeType '" + mimeType + "'." +
"\n\tName: " + parserInfo.parserName +
"\n\tVersion: " + parserInfo.parserVersionNr +

@ -30,6 +30,7 @@ import java.util.ConcurrentModificationException;
import java.util.Iterator;
import de.anomic.server.serverProfiling;
import de.anomic.server.serverProfiling.Event;
import de.anomic.yacy.yacyCore;
import de.anomic.ymage.ymageChart;
import de.anomic.ymage.ymageMatrix;
@ -51,11 +52,11 @@ public class plasmaProfiling {
}
public static long maxPayload(String eventname, long min) {
Iterator i = serverProfiling.history(eventname);
Iterator<Event> i = serverProfiling.history(eventname);
serverProfiling.Event event;
long max = min, l;
while (i.hasNext()) {
event = (serverProfiling.Event) i.next();
event = i.next();
l = ((Long) event.payload).longValue();
if (l > max) max = l;
}
@ -84,13 +85,13 @@ public class plasmaProfiling {
chart.declareDimension(ymageChart.DIMENSION_RIGHT, rightscale, vspace * rightscale / (int)(maxbytes / 1024 / 1024), 0, "0000FF", "CCCCCC", "MEMORY/MEGABYTE");
// draw ppm
Iterator i = serverProfiling.history("ppm");
Iterator<Event> i = serverProfiling.history("ppm");
long time, now = System.currentTimeMillis(), bytes;
int x0 = 1, x1, y0 = 0, y1, ppm;
serverProfiling.Event event;
try {
while (i.hasNext()) {
event = (serverProfiling.Event) i.next();
event = i.next();
time = event.time - now;
ppm = (int) ((Long) event.payload).longValue();
x1 = (int) (time/1000);

@ -44,7 +44,6 @@ package de.anomic.plasma;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeSet;
import de.anomic.htmlFilter.htmlFilterImageEntry;
@ -54,11 +53,11 @@ import de.anomic.yacy.yacyURL;
public final class plasmaSearchImages {
private TreeSet images;
private TreeSet<htmlFilterImageEntry> images;
public plasmaSearchImages(long maxTime, yacyURL url, int depth) {
long start = System.currentTimeMillis();
this.images = new TreeSet();
this.images = new TreeSet<htmlFilterImageEntry>();
if (maxTime > 10) {
Object[] resource = plasmaSnippetCache.getResource(url, true, (int) maxTime, false);
InputStream res = (InputStream) resource[0];
@ -80,13 +79,11 @@ public final class plasmaSearchImages {
// add also links from pages one step deeper, if depth > 0
if (depth > 0) {
Map hl = document.getHyperlinks();
Iterator i = hl.entrySet().iterator();
Iterator<String> i = document.getHyperlinks().keySet().iterator();
String nexturlstring;
while (i.hasNext()) {
Map.Entry e = (Map.Entry) i.next();
String nexturlstring;
try {
nexturlstring = new yacyURL((String) e.getKey(), null).toNormalform(true, true);
nexturlstring = new yacyURL(i.next(), null).toNormalform(true, true);
addAll(new plasmaSearchImages(serverDate.remainingTime(start, maxTime, 10), new yacyURL(nexturlstring, null), depth - 1));
} catch (MalformedURLException e1) {
e1.printStackTrace();
@ -104,11 +101,11 @@ public final class plasmaSearchImages {
}
}
private void addAll(TreeSet ts) {
Iterator i = ts.iterator();
private void addAll(TreeSet<htmlFilterImageEntry> ts) {
Iterator<htmlFilterImageEntry> i = ts.iterator();
htmlFilterImageEntry ie;
while (i.hasNext()) {
ie = (htmlFilterImageEntry) i.next();
ie = i.next();
if (images.contains(ie)) {
if ((ie.height() > 0) && (ie.width() > 0)) images.add(ie);
} else {
@ -117,7 +114,7 @@ public final class plasmaSearchImages {
}
}
public Iterator entries() {
public Iterator<htmlFilterImageEntry> entries() {
// returns htmlFilterImageEntry - Objects
return images.iterator();
}

@ -85,7 +85,7 @@ public class plasmaSnippetCache {
private static int snippetsScoreCounter;
private static kelondroMScoreCluster<String> snippetsScore;
private static HashMap snippetsCache;
private static HashMap<String, String> snippetsCache;
/**
* a cache holding URLs to favicons specified by the page content, e.g. by using the html link-tag. e.g.
@ -93,7 +93,7 @@ public class plasmaSnippetCache {
* &lt;link rel="shortcut icon" type="image/x-icon" href="../src/favicon.ico"&gt;
* </pre>
*/
private static HashMap faviconCache;
private static HashMap<String, yacyURL> faviconCache;
private static plasmaParser parser;
private static serverLog log;
@ -105,8 +105,8 @@ public class plasmaSnippetCache {
log = logx;
snippetsScoreCounter = 0;
snippetsScore = new kelondroMScoreCluster<String>();
snippetsCache = new HashMap();
faviconCache = new HashMap();
snippetsCache = new HashMap<String, String>();
faviconCache = new HashMap<String, yacyURL>();
}
public static class TextSnippet {
@ -114,14 +114,14 @@ public class plasmaSnippetCache {
private String line;
private String error;
private int errorCode;
private Set remaingHashes;
private Set<String> remaingHashes;
private yacyURL favicon;
public TextSnippet(yacyURL url, String line, int errorCode, Set remaingHashes, String errortext) {
public TextSnippet(yacyURL url, String line, int errorCode, Set<String> remaingHashes, String errortext) {
this(url,line,errorCode,remaingHashes,errortext,null);
}
public TextSnippet(yacyURL url, String line, int errorCode, Set remaingHashes, String errortext, yacyURL favicon) {
public TextSnippet(yacyURL url, String line, int errorCode, Set<String> remaingHashes, String errortext, yacyURL favicon) {
this.url = url;
this.line = line;
this.errorCode = errorCode;
@ -147,21 +147,21 @@ public class plasmaSnippetCache {
public int getErrorCode() {
return errorCode;
}
public Set getRemainingHashes() {
public Set<String> getRemainingHashes() {
return this.remaingHashes;
}
public String getLineMarked(Set queryHashes) {
public String getLineMarked(Set<String> queryHashes) {
if (line == null) return "";
if ((queryHashes == null) || (queryHashes.size() == 0)) return line.trim();
if (line.endsWith(".")) line = line.substring(0, line.length() - 1);
Iterator i = queryHashes.iterator();
Iterator<String> i = queryHashes.iterator();
String h;
String[] w = line.split(" ");
String prefix = "";
String postfix = "";
int len = 0;
while (i.hasNext()) {
h = (String) i.next();
h = i.next();
for (int j = 0; j < w.length; j++) {
//ignore punctuation marks (contrib [MN])
//note to myself:
@ -239,12 +239,13 @@ public class plasmaSnippetCache {
}
}
public static boolean existsInCache(yacyURL url, Set queryhashes) {
public static boolean existsInCache(yacyURL url, Set<String> queryhashes) {
String hashes = yacySearch.set2string(queryhashes);
return retrieveFromCache(hashes, url.hash()) != null;
}
public static TextSnippet retrieveTextSnippet(yacyURL url, Set queryhashes, boolean fetchOnline, boolean pre, int snippetMaxLength, int timeout, int maxDocLen) {
@SuppressWarnings("unchecked")
public static TextSnippet retrieveTextSnippet(yacyURL url, Set<String> queryhashes, boolean fetchOnline, boolean pre, int snippetMaxLength, int timeout, int maxDocLen) {
// heise = "0OQUNU3JSs05"
if (queryhashes.size() == 0) {
@ -258,7 +259,7 @@ public class plasmaSnippetCache {
String line = retrieveFromCache(wordhashes, url.hash());
if (line != null) {
//System.out.println("found snippet for URL " + url + " in cache: " + line);
return new TextSnippet(url, line, source, null, null,(yacyURL) faviconCache.get(url.hash()));
return new TextSnippet(url, line, source, null, null, faviconCache.get(url.hash()));
}
/* ===========================================================================
@ -336,11 +337,11 @@ public class plasmaSnippetCache {
// we have found a parseable non-empty file: use the lines
// compute snippet from text
final Iterator sentences = document.getSentences(pre);
final Iterator<StringBuffer> sentences = document.getSentences(pre);
if (sentences == null) return new TextSnippet(url, null, ERROR_PARSER_NO_LINES, queryhashes, "parser returned no sentences",resFavicon);
Object[] tsr = computeTextSnippet(sentences, queryhashes, snippetMaxLength);
String textline = (tsr == null) ? null : (String) tsr[0];
Set remainingHashes = (tsr == null) ? queryhashes : (Set) tsr[1];
Set<String> remainingHashes = (tsr == null) ? queryhashes : (Set) tsr[1];
// compute snippet from media
String audioline = computeMediaSnippet(document.getAudiolinks(), queryhashes);
@ -451,7 +452,7 @@ public class plasmaSnippetCache {
if (snippetsScoreCounter == java.lang.Integer.MAX_VALUE) {
snippetsScoreCounter = 0;
snippetsScore = new kelondroMScoreCluster<String>();
snippetsCache = new HashMap();
snippetsCache = new HashMap<String, String>();
}
// flush cache if cache is full
@ -465,19 +466,19 @@ public class plasmaSnippetCache {
private static String retrieveFromCache(String wordhashes, String urlhash) {
// generate key
String key = urlhash + wordhashes;
return (String) snippetsCache.get(key);
return snippetsCache.get(key);
}
private static String computeMediaSnippet(Map media, Set queryhashes) {
Iterator i = media.entrySet().iterator();
Map.Entry entry;
private static String computeMediaSnippet(Map<String, String> media, Set<String> queryhashes) {
Iterator<Map.Entry<String, String>> i = media.entrySet().iterator();
Map.Entry<String, String> entry;
String url, desc;
Set s;
Set<String> s;
String result = "";
while (i.hasNext()) {
entry = (Map.Entry) i.next();
url = (String) entry.getKey();
desc = (String) entry.getValue();
entry = i.next();
url = entry.getKey();
desc = entry.getValue();
s = removeAppearanceHashes(url, queryhashes);
if (s.size() == 0) {
result += "<br /><a href=\"" + url + "\">" + ((desc.length() == 0) ? url : desc) + "</a>";
@ -493,37 +494,38 @@ public class plasmaSnippetCache {
return result.substring(6);
}
@SuppressWarnings("unchecked")
private static Object[] /*{String - the snippet, Set - remaining hashes}*/
computeTextSnippet(Iterator sentences, Set queryhashes, int maxLength) {
computeTextSnippet(Iterator<StringBuffer> sentences, Set<String> queryhashes, int maxLength) {
try {
if (sentences == null) return null;
if ((queryhashes == null) || (queryhashes.size() == 0)) return null;
Iterator j;
HashMap hs;
Iterator<String> j;
HashMap<String, Integer> hs;
StringBuffer sentence;
TreeMap os = new TreeMap();
TreeMap<Integer, StringBuffer> os = new TreeMap<Integer, StringBuffer>();
int uniqCounter = 9999;
int score;
while (sentences.hasNext()) {
sentence = (StringBuffer) sentences.next();
sentence = sentences.next();
hs = hashSentence(sentence.toString());
j = queryhashes.iterator();
score = 0;
while (j.hasNext()) {if (hs.containsKey((String) j.next())) score++;}
while (j.hasNext()) {if (hs.containsKey(j.next())) score++;}
if (score > 0) {
os.put(new Integer(1000000 * score - sentence.length() * 10000 + uniqCounter--), sentence);
}
}
String result;
Set remaininghashes;
Set<String> remaininghashes;
while (os.size() > 0) {
sentence = (StringBuffer) os.remove((Integer) os.lastKey()); // sentence with the biggest score
sentence = os.remove(os.lastKey()); // sentence with the biggest score
Object[] tsr = computeTextSnippet(sentence.toString(), queryhashes, maxLength);
if (tsr == null) continue;
result = (String) tsr[0];
if ((result != null) && (result.length() > 0)) {
remaininghashes = (Set) tsr[1];
remaininghashes = (Set<String>) tsr[1];
if (remaininghashes.size() == 0) {
// we have found the snippet
return new Object[]{result, remaininghashes};
@ -553,23 +555,21 @@ public class plasmaSnippetCache {
}
private static Object[] /*{String - the snippet, Set - remaining hashes}*/
computeTextSnippet(String sentence, Set queryhashes, int maxLength) {
computeTextSnippet(String sentence, Set<String> queryhashes, int maxLength) {
try {
if (sentence == null) return null;
if ((queryhashes == null) || (queryhashes.size() == 0)) return null;
Iterator j;
HashMap hs;
String hash;
// find all hashes that appear in the sentence
hs = hashSentence(sentence);
j = queryhashes.iterator();
HashMap<String, Integer> hs = hashSentence(sentence);
Iterator<String> j = queryhashes.iterator();
Integer pos;
int p, minpos = sentence.length(), maxpos = -1;
HashSet remainingHashes = new HashSet();
HashSet<String> remainingHashes = new HashSet<String>();
while (j.hasNext()) {
hash = (String) j.next();
pos = (Integer) hs.get(hash);
hash = j.next();
pos = hs.get(hash);
if (pos == null) {
remainingHashes.add(hash);
} else {
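
The ranking in computeTextSnippet packs score and sentence length into a single Integer key, 1000000 * score - sentence.length() * 10000 + uniqCounter--, so TreeMap.lastKey() yields the sentence with the most query hits, preferring shorter sentences on ties. A compact sketch of the trick, with substring matching standing in for hashSentence:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeMap;

// Compact model of the snippet ranking: pack (score, length) into one key.
public class SnippetRankSketch {
    static String best(Iterable<String> sentences, Set<String> queryWords) {
        TreeMap<Integer, String> os = new TreeMap<Integer, String>();
        int uniq = 9999; // keeps equal score/length keys distinct
        for (String sentence : sentences) {
            int score = 0;
            for (String w : queryWords) if (sentence.contains(w)) score++;
            // high score dominates; among equal scores, shorter sentences win
            if (score > 0) os.put(Integer.valueOf(1000000 * score - sentence.length() * 10000 + uniq--), sentence);
        }
        return os.isEmpty() ? null : os.get(os.lastKey());
    }

    public static void main(String[] args) {
        Set<String> q = new HashSet<String>(Arrays.asList("peer", "search"));
        System.out.println(best(Arrays.asList(
                "YaCy is a peer-to-peer search engine",
                "a very long sentence that mentions peer exactly once and nothing else useful"),
                q)); // prints the first sentence: two hits beat one
    }
}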

@ -138,6 +138,7 @@ import de.anomic.plasma.crawler.plasmaCrawlQueues;
import de.anomic.plasma.crawler.plasmaProtocolLoader;
import de.anomic.plasma.dbImport.dbImportManager;
import de.anomic.plasma.parser.ParserException;
import de.anomic.plasma.plasmaSwitchboardQueue.Entry;
import de.anomic.plasma.urlPattern.defaultURLPattern;
import de.anomic.plasma.urlPattern.plasmaURLPattern;
import de.anomic.server.serverAbstractSwitch;
@ -217,12 +218,12 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
public boolean rankingOn;
public plasmaRankingDistribution rankingOwnDistribution;
public plasmaRankingDistribution rankingOtherDistribution;
public HashMap outgoingCookies, incomingCookies;
public HashMap<String, Object[]> outgoingCookies, incomingCookies;
public kelondroMapTable facilityDB;
public plasmaParser parser;
public long proxyLastAccess, localSearchLastAccess, remoteSearchLastAccess;
public yacyCore yc;
public HashMap indexingTasksInProcess;
public HashMap<String, plasmaSwitchboardQueue.Entry> indexingTasksInProcess;
public userDB userDB;
public bookmarksDB bookmarksDB;
public plasmaWebStructure webStructure;
@ -872,7 +873,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
public static final String DBFILE_USER = "DATA/SETTINGS/user.db";
public Hashtable crawlJobsStatus = new Hashtable();
public Hashtable<String, Object[]> crawlJobsStatus = new Hashtable<String, Object[]>();
private static plasmaSwitchboard sb;
@ -898,7 +899,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
// _always_ overwritten each time with the default values. This is done on purpose.
// the network definition should be made either consistent for all peers,
// or independently using a bootstrap URL
Map initProps;
Map<String, String> initProps;
if (networkUnitDefinition.startsWith("http://")) {
try {
this.setConfig(httpc.loadHashMap(new yacyURL(networkUnitDefinition, null), remoteProxyConfig));
@ -964,7 +965,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
this.log.logConfig("HTDOCS Path: " + this.htDocsPath.toString());
this.rankingPath = getConfigPath(RANKING_PATH, RANKING_PATH_DEFAULT);
this.log.logConfig("Ranking Path: " + this.rankingPath.toString());
this.rankingPermissions = new HashMap(); // mapping of permission - to filename.
this.rankingPermissions = new HashMap<String, String>(); // mapping of permission - to filename.
this.workPath = getConfigPath(WORK_PATH, WORK_PATH_DEFAULT);
this.log.logConfig("Work Path: " + this.workPath.toString());
@ -985,7 +986,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
// read only once upon first instantiation of this class
String f = getConfig(LIST_BLUE, LIST_BLUE_DEFAULT);
File plasmaBlueListFile = new File(f);
if (f != null) blueList = kelondroMSetTools.loadList(plasmaBlueListFile, kelondroNaturalOrder.naturalComparator); else blueList= new TreeSet();
if (f != null) blueList = kelondroMSetTools.loadList(plasmaBlueListFile, kelondroNaturalOrder.naturalComparator); else blueList= new TreeSet<String>();
this.log.logConfig("loaded blue-list from file " + plasmaBlueListFile.getName() + ", " +
blueList.size() + " entries, " +
ppRamString(plasmaBlueListFile.length()/1024));
@ -1150,14 +1151,14 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
this.sbQueue = new plasmaSwitchboardQueue(this.wordIndex.loadedURL, new File(this.plasmaPath, "switchboardQueue2.stack"), this.profilesActiveCrawls);
// create in process list
this.indexingTasksInProcess = new HashMap();
this.indexingTasksInProcess = new HashMap<String, plasmaSwitchboardQueue.Entry>();
// going through the sbQueue Entries and registering all content files as in use
int count = 0;
plasmaSwitchboardQueue.Entry queueEntry;
Iterator i1 = sbQueue.entryIterator(true);
Iterator<plasmaSwitchboardQueue.Entry> i1 = sbQueue.entryIterator(true);
while (i1.hasNext()) {
queueEntry = (plasmaSwitchboardQueue.Entry) i1.next();
queueEntry = i1.next();
if ((queueEntry != null) && (queueEntry.url() != null) && (queueEntry.cacheFile().exists())) {
plasmaHTCache.filesInUse.add(queueEntry.cacheFile());
count++;
@ -1201,8 +1202,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
// init cookie-Monitor
this.log.logConfig("Starting Cookie Monitor");
this.outgoingCookies = new HashMap();
this.incomingCookies = new HashMap();
this.outgoingCookies = new HashMap<String, Object[]>();
this.incomingCookies = new HashMap<String, Object[]>();
// init search history trackers
this.localSearchTracker = new HashMap<String, TreeSet<Long>>(); // String:TreeSet - IP:set of Long(accessTime)

@ -124,7 +124,7 @@ public class plasmaSwitchboardQueue {
}
public synchronized Entry remove(String urlHash) {
Iterator i = sbQueueStack.stackIterator(true);
Iterator<kelondroRow.Entry> i = sbQueueStack.stackIterator(true);
kelondroRow.Entry rowentry;
Entry entry;
while (i.hasNext()) {
@ -158,15 +158,15 @@ public class plasmaSwitchboardQueue {
super.finalize();
}
public Iterator entryIterator(boolean up) {
public Iterator<Entry> entryIterator(boolean up) {
// iterates the elements in an ordered way.
// returns plasmaSwitchboardQueue.Entry - type Objects
return new entryIterator(up);
}
public class entryIterator implements Iterator {
public class entryIterator implements Iterator<Entry> {
Iterator rows;
Iterator<kelondroRow.Entry> rows;
public entryIterator(boolean up) {
rows = sbQueueStack.stackIterator(up);
@ -176,7 +176,7 @@ public class plasmaSwitchboardQueue {
return rows.hasNext();
}
public Object next() {
public Entry next() {
return new Entry((kelondroRow.Entry) rows.next());
}

@ -56,7 +56,7 @@ public class plasmaWebStructure {
private serverLog log;
private File rankingPath, structureFile;
private String crlFile, crgFile;
private TreeMap structure; // String2String with <b64hash(6)>','<host> to <date-yyyymmdd(8)>{<target-b64hash(6)><target-count-hex(4)>}*
private TreeMap<String, String> structure; // <b64hash(6)>','<host> to <date-yyyymmdd(8)>{<target-b64hash(6)><target-count-hex(4)>}*
public plasmaWebStructure(serverLog log, File rankingPath, String crlFile, String crgFile, File structureFile) {
this.log = log;
@ -64,30 +64,30 @@ public class plasmaWebStructure {
this.crlFile = crlFile;
this.crgFile = crgFile;
this.crg = new StringBuffer(maxCRGDump);
this.structure = new TreeMap();
this.structure = new TreeMap<String, String>();
this.structureFile = structureFile;
// load web structure
Map loadedStructure = serverFileUtils.loadHashMap(this.structureFile);
Map<String, String> loadedStructure = serverFileUtils.loadHashMap(this.structureFile);
if (loadedStructure != null) this.structure.putAll(loadedStructure);
// delete outdated entries in case the structure is too big
if (this.structure.size() > maxhosts) {
// fill a set with last-modified - dates of the structure
TreeSet delset = new TreeSet();
Map.Entry entry;
Iterator i = this.structure.entrySet().iterator();
TreeSet<String> delset = new TreeSet<String>();
Map.Entry<String, String> entry;
Iterator<Map.Entry<String, String>> i = this.structure.entrySet().iterator();
String key, value;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
key = (String) entry.getKey();
value = (String) entry.getValue();
entry = i.next();
key = entry.getKey();
value = entry.getValue();
delset.add(value.substring(0, 8) + key);
}
int delcount = this.structure.size() - (maxhosts * 9 / 10);
i = delset.iterator();
while ((delcount > 0) && (i.hasNext())) {
this.structure.remove(((String) i.next()).substring(8));
Iterator<String> j = delset.iterator();
while ((delcount > 0) && (j.hasNext())) {
this.structure.remove(j.next().substring(8));
delcount--;
}
}
@ -97,8 +97,8 @@ public class plasmaWebStructure {
assert url.hash().equals(baseurlhash);
// generate citation reference
Map hl = document.getHyperlinks();
Iterator it = hl.entrySet().iterator();
Map<String, String> hl = document.getHyperlinks();
Iterator<String> it = hl.keySet().iterator();
String nexturlhash;
StringBuffer cpg = new StringBuffer(12 * (hl.size() + 1) + 1);
StringBuffer cpl = new StringBuffer(12 * (hl.size() + 1) + 1);
@ -107,7 +107,7 @@ public class plasmaWebStructure {
int LCount = 0;
while (it.hasNext()) {
try {
nexturlhash = (new yacyURL((String) ((Map.Entry) it.next()).getKey(), null)).hash();
nexturlhash = (new yacyURL(it.next(), null)).hash();
if (nexturlhash != null) {
if (nexturlhash.substring(6).equals(lhp)) {
// this is an inbound link
@ -184,9 +184,9 @@ public class plasmaWebStructure {
return (refs.length() - 8) / 10;
}
private static Map refstr2map(String refs) {
if ((refs == null) || (refs.length() <= 8)) return new HashMap();
Map map = new HashMap();
private static Map<String, Integer> refstr2map(String refs) {
if ((refs == null) || (refs.length() <= 8)) return new HashMap<String, Integer>();
Map<String, Integer> map = new HashMap<String, Integer>();
String c;
int refsc = refstr2count(refs);
for (int i = 0; i < refsc; i++) {
@ -196,16 +196,16 @@ public class plasmaWebStructure {
return map;
}
private static String map2refstr(Map map) {
private static String map2refstr(Map<String, Integer> map) {
StringBuffer s = new StringBuffer(map.size() * 10);
s.append(serverDate.formatShortDay(new Date()));
Iterator i = map.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, Integer>> i = map.entrySet().iterator();
Map.Entry<String, Integer> entry;
String h;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
s.append((String) entry.getKey());
h = Integer.toHexString(((Integer) entry.getValue()).intValue());
entry = i.next();
s.append(entry.getKey());
h = Integer.toHexString(entry.getValue().intValue());
if (h.length() == 0) {
s.append("0000");
} else if (h.length() == 1) {
@ -223,16 +223,16 @@ public class plasmaWebStructure {
return s.toString();
}
public Map references(String domhash) {
public Map<String, Integer> references(String domhash) {
// returns a map with a domhash(String):refcount(Integer) relation
assert domhash.length() == 6;
SortedMap tailMap = structure.tailMap(domhash);
if ((tailMap == null) || (tailMap.size() == 0)) return new HashMap();
String key = (String) tailMap.firstKey();
SortedMap<String, String> tailMap = structure.tailMap(domhash);
if ((tailMap == null) || (tailMap.size() == 0)) return new HashMap<String, Integer>();
String key = tailMap.firstKey();
if (key.startsWith(domhash)) {
return refstr2map((String) tailMap.get(key));
return refstr2map(tailMap.get(key));
} else {
return new HashMap();
return new HashMap<String, Integer>();
}
}
@ -240,11 +240,11 @@ public class plasmaWebStructure {
// returns the number of domains that are referenced by this domhash
assert domhash.length() == 6 : "domhash = " + domhash;
try {
SortedMap tailMap = structure.tailMap(domhash);
SortedMap<String, String> tailMap = structure.tailMap(domhash);
if ((tailMap == null) || (tailMap.size() == 0)) return 0;
String key = (String) tailMap.firstKey();
String key = tailMap.firstKey();
if (key.startsWith(domhash)) {
return refstr2count((String) tailMap.get(key));
return refstr2count(tailMap.get(key));
} else {
return 0;
}
@ -257,16 +257,16 @@ public class plasmaWebStructure {
// returns the domain as string, null if unknown
assert domhash.length() == 6;
try {
SortedMap tailMap = structure.tailMap(domhash);
SortedMap<String, String> tailMap = structure.tailMap(domhash);
if ((tailMap == null) || (tailMap.size() == 0)) return null;
String key = (String) tailMap.firstKey();
String key = tailMap.firstKey();
if (key.startsWith(domhash)) {
return key.substring(7);
} else {
return null;
}
} catch (ConcurrentModificationException e) {
// we dont want to implement a synchronization here,
// we don't want to implement a synchronization here,
// because this is 'only' used for a graphics application
// just return null
return null;
@ -277,7 +277,7 @@ public class plasmaWebStructure {
String domhash = url.hash().substring(6);
// parse the new reference string and join it with the stored references
Map refs = references(domhash);
Map<String, Integer> refs = references(domhash);
assert reference.length() % 12 == 0;
String dom;
int c;
@ -297,13 +297,13 @@ public class plasmaWebStructure {
// shrink the references: the entry with the smallest number of references is removed
int minrefcount = Integer.MAX_VALUE;
String minrefkey = null;
Iterator i = refs.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, Integer>> i = refs.entrySet().iterator();
Map.Entry<String, Integer> entry;
findloop: while (i.hasNext()) {
entry = (Map.Entry) i.next();
if (((Integer) entry.getValue()).intValue() < minrefcount) {
minrefcount = ((Integer) entry.getValue()).intValue();
minrefkey = (String) entry.getKey();
entry = i.next();
if (entry.getValue().intValue() < minrefcount) {
minrefcount = entry.getValue().intValue();
minrefkey = entry.getKey();
}
if (minrefcount == 1) break findloop;
}
@ -328,29 +328,29 @@ public class plasmaWebStructure {
public String hostWithMaxReferences() {
// find domain with most references
Iterator i = structure.entrySet().iterator();
Iterator<Map.Entry<String, String>> i = structure.entrySet().iterator();
int refsize, maxref = 0;
String maxhost = null;
Map.Entry entry;
Map.Entry<String, String> entry;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
refsize = ((String) entry.getValue()).length();
entry = i.next();
refsize = entry.getValue().length();
if (refsize > maxref) {
maxref = refsize;
maxhost = ((String) entry.getKey()).substring(7);
maxhost = entry.getKey().substring(7);
}
}
return maxhost;
}
public Iterator structureEntryIterator() {
public Iterator<structureEntry> structureEntryIterator() {
// iterates objects of type structureEntry
return new structureIterator();
}
public class structureIterator implements Iterator {
public class structureIterator implements Iterator<structureEntry> {
private Iterator i;
private Iterator<Map.Entry<String, String>> i;
private structureEntry nextentry;
public structureIterator() {
@ -363,11 +363,11 @@ public class plasmaWebStructure {
}
private void next0() {
Map.Entry entry = null;
Map.Entry<String, String> entry = null;
String dom = null, ref;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
dom = (String) entry.getKey();
entry = i.next();
dom = entry.getKey();
if (dom.length() >= 8) break;
if (!i.hasNext()) {
nextentry = null;
@ -378,11 +378,11 @@ public class plasmaWebStructure {
nextentry = null;
return;
}
ref = (String) entry.getValue();
ref = entry.getValue();
nextentry = new structureEntry(dom.substring(0, 6), dom.substring(7), ref.substring(0, 8), refstr2map(ref));
}
public Object next() {
public structureEntry next() {
structureEntry r = nextentry;
next0();
return r;
@ -396,8 +396,8 @@ public class plasmaWebStructure {
public class structureEntry {
public String domhash, domain, date;
public Map references;
public structureEntry(String domhash, String domain, String date, Map references) {
public Map<String, Integer> references;
public structureEntry(String domhash, String domain, String date, Map<String, Integer> references) {
this.domhash = domhash;
this.domain = domain;
this.date = date;
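
refstr2map and map2refstr above encode a host's references as one fixed-width string: 8 characters of date (yyyymmdd) followed by 10 characters per target, a 6-character host hash plus a 4-digit hex reference count. A decoder sketch with invented sample values:

import java.util.HashMap;
import java.util.Map;

// Sketch of the reference-string layout that refstr2map/map2refstr handle.
public class RefStrSketch {
    static Map<String, Integer> decode(String refs) {
        Map<String, Integer> map = new HashMap<String, Integer>();
        if (refs == null || refs.length() <= 8) return map;
        int count = (refs.length() - 8) / 10; // same arithmetic as refstr2count
        for (int i = 0; i < count; i++) {
            String chunk = refs.substring(8 + i * 10, 8 + (i + 1) * 10);
            map.put(chunk.substring(0, 6),
                    Integer.valueOf(Integer.parseInt(chunk.substring(6), 16)));
        }
        return map;
    }

    public static void main(String[] args) {
        // one entry: hash "AbCdEf" referenced 0x001a = 26 times (values invented)
        System.out.println(decode("20071217" + "AbCdEf001a")); // {AbCdEf=26}
    }
}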

@ -384,6 +384,7 @@ public final class plasmaWordIndex implements indexRI {
return containers;
}
@SuppressWarnings("unchecked")
public Map<String, indexContainer>[] localSearchContainers(plasmaSearchQuery query, Set<String> urlselection) {
// search for the set of hashes and return a map of wordhash:indexContainer containing the search result

@ -201,6 +201,7 @@ public final class yacySeedDB {
return this.sizeConnected() <= dhtActivityMagic;
}
@SuppressWarnings("unchecked")
private synchronized kelondroMapObjects openSeedTable(File seedDBFile) {
final boolean usetree = false;
new File(seedDBFile.getParent()).mkdirs();
@ -251,6 +252,7 @@ public final class yacySeedDB {
if (seedPotentialDB != null) seedPotentialDB.close();
}
@SuppressWarnings("unchecked")
public void initializeHandler(String mapname, Map map) {
// this is used to set up a lastSeen lookup table

@ -43,13 +43,13 @@ public class ymageGraph {
// a ymageGraph is a set of points and borders between the points
// to reference the points, they must all have a nickname
HashMap points;
HashSet borders;
HashMap<String, coordinate> points;
HashSet<String> borders;
double leftmost, rightmost, topmost, bottommost;
public ymageGraph() {
points = new HashMap();
borders = new HashSet();
points = new HashMap<String, coordinate>();
borders = new HashSet<String>();
leftmost = 1.0;
rightmost = -1.0;
topmost = -1.0;
@ -57,7 +57,7 @@ public class ymageGraph {
}
public coordinate getPoint(String name) {
return (coordinate) points.get(name);
return points.get(name);
}
public coordinate[] getBorder(String name) {
@ -71,7 +71,7 @@ public class ymageGraph {
public coordinate addPoint(String name, double x, double y, int layer) {
coordinate newc = new coordinate(x, y, layer);
coordinate oldc = (coordinate) points.put(name, newc);
coordinate oldc = points.put(name, newc);
assert oldc == null; // all add shall be unique
if (x > rightmost) rightmost = x;
if (x < leftmost) leftmost = x;
@ -85,8 +85,8 @@ public class ymageGraph {
}
public void setBorder(String fromPoint, String toPoint) {
coordinate from = (coordinate) points.get(fromPoint);
coordinate to = (coordinate) points.get(toPoint);
coordinate from = points.get(fromPoint);
coordinate to = points.get(toPoint);
assert from != null;
assert to != null;
borders.add(fromPoint + "$" + toPoint);
@ -108,19 +108,19 @@ public class ymageGraph {
public void print() {
// for debug purpose: print out all coordinates
Iterator i = points.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, coordinate>> i = points.entrySet().iterator();
Map.Entry<String, coordinate> entry;
String name;
coordinate c;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
name = (String) entry.getKey();
c = (coordinate) entry.getValue();
entry = i.next();
name = entry.getKey();
c = entry.getValue();
System.out.println("point(" + c.x + ", " + c.y + ", " + c.layer + ") [" + name + "]");
}
i = borders.iterator();
while (i.hasNext()) {
System.out.println("border(" + i.next() + ")");
Iterator<String> j = borders.iterator();
while (j.hasNext()) {
System.out.println("border(" + j.next() + ")");
}
}
@ -136,15 +136,15 @@ public class ymageGraph {
double yfactor = ((topmost - bottommost) == 0.0) ? 0.0 : (height - topborder - bottomborder) / (topmost - bottommost);
// draw dots and names
Iterator i = points.entrySet().iterator();
Map.Entry entry;
Iterator<Map.Entry<String, coordinate>> i = points.entrySet().iterator();
Map.Entry<String, coordinate> entry;
String name;
coordinate c;
int x, y;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
name = (String) entry.getKey();
c = (coordinate) entry.getValue();
entry = i.next();
name = entry.getKey();
c = entry.getValue();
x = (xfactor == 0.0) ? width / 2 : (int) (leftborder + (c.x - leftmost) * xfactor);
y = (yfactor == 0.0) ? height / 2 : (int) (height - bottomborder - (c.y - bottommost) * yfactor);
image.setColor(color_dot);
@ -154,12 +154,12 @@ public class ymageGraph {
}
// draw lines
i = borders.iterator();
Iterator<String> j = borders.iterator();
coordinate[] border;
image.setColor(color_line);
int x0, x1, y0, y1;
while (i.hasNext()) {
border = getBorder((String) i.next());
while (j.hasNext()) {
border = getBorder(j.next());
if (border == null) continue;
if (xfactor == 0.0) {
x0 = width / 2;
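
ymageGraph stores an edge as the two point names joined with '$' in a HashSet<String>, splitting the string again when a border is drawn. A minimal sketch of that representation (coordinates reduced to double[] pairs):

import java.util.HashMap;
import java.util.HashSet;

// Minimal model of ymageGraph's border storage: "from$to" strings in a set.
public class BorderSetSketch {
    public static void main(String[] args) {
        HashMap<String, double[]> points = new HashMap<String, double[]>();
        points.put("a", new double[] { 0.0, 0.0 });
        points.put("b", new double[] { 1.0, 1.0 });

        HashSet<String> borders = new HashSet<String>();
        borders.add("a" + "$" + "b");

        for (String border : borders) {
            int p = border.indexOf('$');
            double[] from = points.get(border.substring(0, p));
            double[] to = points.get(border.substring(p + 1));
            System.out.println(from[0] + "," + from[1] + " -> " + to[0] + "," + to[1]);
        }
    }
}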

@ -676,14 +676,14 @@ public class ymageMatrix {
return System.currentTimeMillis() - this.access > timeout;
}
}
private static final ArrayList sbbPool = new ArrayList();
private static final ArrayList<sbbBuffer> sbbPool = new ArrayList<sbbBuffer>();
private static serverByteBuffer sbbFromPool(int width, int height, long timeout) {
// returns an Image object from the image pool
// if the pooled Image was created recently (before timeout), it is not used
synchronized (sbbPool) {
sbbBuffer b;
for (int i = 0; i < sbbPool.size(); i++) {
b = (sbbBuffer) sbbPool.get(i);
b = sbbPool.get(i);
if ((b.enoughSize(width, height)) && (b.olderThan(timeout))) {
// use this buffer
b.access = System.currentTimeMillis();
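
The sbbPool change above types the buffer pool, dropping the (sbbBuffer) cast in the reuse loop: a pooled buffer is handed out only if it is large enough and was last used before the timeout. A self-contained sketch of that reuse test (Buffer is a stand-in for sbbBuffer):

import java.util.ArrayList;

// Sketch of the sbbPool reuse test with a typed pool.
public class BufferPoolSketch {
    static class Buffer {
        int width, height;
        long access;
        Buffer(int w, int h) { width = w; height = h; access = System.currentTimeMillis(); }
        boolean enoughSize(int w, int h) { return width >= w && height >= h; }
        boolean olderThan(long timeout) { return System.currentTimeMillis() - access > timeout; }
    }

    private static final ArrayList<Buffer> pool = new ArrayList<Buffer>();

    static synchronized Buffer fromPool(int w, int h, long timeout) {
        for (Buffer b : pool) {            // typed list: no (sbbBuffer) cast needed
            if (b.enoughSize(w, h) && b.olderThan(timeout)) {
                b.access = System.currentTimeMillis();
                return b;
            }
        }
        Buffer b = new Buffer(w, h);       // nothing reusable: allocate and pool it
        pool.add(b);
        return b;
    }
}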

@ -12,7 +12,7 @@ public class ymageToolCircle {
if ((radius - 1) < circles.length) return circles[radius - 1];
// read some lines from known circles
HashSet crds = new HashSet();
HashSet<String> crds = new HashSet<String>();
crds.add("0|0");
String co;
for (int i = Math.max(0, circles.length - 5); i < circles.length; i++) {
@ -28,9 +28,9 @@ public class ymageToolCircle {
// compute more lines in new circles
int x, y;
ArrayList crc;
ArrayList<int[]> crc;
for (int r = circles.length; r < newCircles.length; r++) {
crc = new ArrayList();
crc = new ArrayList<int[]>();
for (int a = 0; a <= 2 * (r + 1); a++) {
x = (int) ((r + 1) * Math.cos(Math.PI * a / (4 * (r + 1))));
y = (int) ((r + 1) * Math.sin(Math.PI * a / (4 * (r + 1))));
@ -52,7 +52,7 @@ public class ymageToolCircle {
newCircles[r] = new int[2 * (crc.size() - 1)];
int[] coords;
for (int i = 0; i < crc.size() - 1; i++) {
coords = (int[]) crc.get(i);
coords = crc.get(i);
newCircles[r][2 * i ] = coords[0];
newCircles[r][2 * i + 1] = coords[1];
//System.out.print(circles[r][i][0] + "," +circles[r][i][1] + "; ");
