- updated references to indexContainer

- more bugfixes and debugging for indexAbstract processing

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@2555 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 19 years ago
parent 0bed3b9ac3
commit 75b198bc02

@ -164,25 +164,27 @@ public final class search {
prop.put("indexcount", new String(indexcount));
}
// join and order the result
indexContainer localResults = theSearch.localSearchJoin(containers.values());
int joincount = localResults.size();
prop.put("joincount", Integer.toString(joincount));
plasmaSearchResult acc = theSearch.order(localResults);
// generate compressed index for maxcounthash
// this is not needed if the search is restricted to specific urls, because it is a re-search
if ((maxcounthash == null) || (urls.length() != 0)) {
if ((maxcounthash == null) || (urls.length() != 0) || (keyhashes.size() == 1)) {
prop.put("indexabstract","");
} else {
String indexabstract = "indexabstract." + maxcounthash + "=" + indexURL.compressIndex(((indexContainer) containers.get(maxcounthash)), 1000).toString() + serverCore.crlfString;
String indexabstract = "indexabstract." + maxcounthash + "=" + indexURL.compressIndex(((indexContainer) containers.get(maxcounthash)), localResults, 1000).toString() + serverCore.crlfString;
if ((neardhthash != null) && (!(neardhthash.equals(maxcounthash)))) {
indexabstract += "indexabstract." + neardhthash + "=" + indexURL.compressIndex(((indexContainer) containers.get(neardhthash)), 1000).toString() + serverCore.crlfString;
indexabstract += "indexabstract." + neardhthash + "=" + indexURL.compressIndex(((indexContainer) containers.get(neardhthash)), localResults, 1000).toString() + serverCore.crlfString;
}
System.out.println("DEBUG-ABSTRACTGENERATION: maxcounthash = " + maxcounthash);
System.out.println("DEBUG-ABSTRACTGENERATION: neardhthash = " + neardhthash);
//yacyCore.log.logFine("DEBUG HASH SEARCH: " + indexabstract);
prop.put("indexabstract", indexabstract);
}
// join and order the result
indexContainer localResults = theSearch.localSearchJoin(containers.values());
int joincount = localResults.size();
prop.put("joincount", Integer.toString(joincount));
plasmaSearchResult acc = theSearch.order(localResults);
// prepare result
if ((joincount == 0) || (acc == null)) {

@ -30,7 +30,7 @@ package de.anomic.index;
public abstract class indexAbstractRI implements indexRI {
public indexContainer addEntry(String wordHash, indexEntry newEntry, long updateTime, boolean dhtCase) {
indexContainer container = new indexRowSetContainer(wordHash);
indexContainer container = new indexContainer(wordHash);
container.add(newEntry);
return addEntries(container, updateTime, dhtCase);
}

@ -99,7 +99,7 @@ public class indexCollectionRI extends indexAbstractRI implements indexRI {
byte[] key = (byte[]) oo[0];
kelondroRowSet collection = (kelondroRowSet) oo[1];
if (collection == null) return null;
return new indexRowSetContainer(new String(key), collection);
return new indexContainer(new String(key), collection);
}
public void remove() {
@ -113,7 +113,7 @@ public class indexCollectionRI extends indexAbstractRI implements indexRI {
kelondroRowSet collection = collectionIndex.get(wordHash.getBytes(), deleteIfEmpty);
if (collection != null) collection.select(urlselection);
if ((collection == null) || (collection.size() == 0)) return null;
return new indexRowSetContainer(wordHash, collection);
return new indexContainer(wordHash, collection);
} catch (IOException e) {
return null;
}
@ -123,7 +123,7 @@ public class indexCollectionRI extends indexAbstractRI implements indexRI {
try {
kelondroRowSet collection = collectionIndex.delete(wordHash.getBytes());
if (collection == null) return null;
return new indexRowSetContainer(wordHash, collection);
return new indexContainer(wordHash, collection);
} catch (IOException e) {
return null;
}

@ -39,20 +39,20 @@ import de.anomic.kelondro.kelondroOrder;
import de.anomic.kelondro.kelondroRow;
import de.anomic.kelondro.kelondroRowSet;
public class indexRowSetContainer extends kelondroRowSet implements indexContainer {
public class indexContainer extends kelondroRowSet {
private String wordHash;
public indexRowSetContainer(String wordHash) {
public indexContainer(String wordHash) {
this(wordHash, new kelondroNaturalOrder(true), 0);
}
public indexRowSetContainer(String wordHash, kelondroRowSet collection) {
public indexContainer(String wordHash, kelondroRowSet collection) {
super(collection);
this.wordHash = wordHash;
}
public indexRowSetContainer(String wordHash, kelondroOrder ordering, int column) {
public indexContainer(String wordHash, kelondroOrder ordering, int column) {
super(indexURLEntry.urlEntryRow);
this.wordHash = wordHash;
this.lastTimeWrote = 0;
@ -60,7 +60,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
}
public indexContainer topLevelClone() {
indexContainer newContainer = new indexRowSetContainer(this.wordHash, this.sortOrder, this.sortColumn);
indexContainer newContainer = new indexContainer(this.wordHash, this.sortOrder, this.sortColumn);
newContainer.add(this, -1);
return newContainer;
}
@ -220,7 +220,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
singleContainer = (indexContainer) i.next();
// check result
if ((singleContainer == null) || (singleContainer.size() == 0)) return new indexRowSetContainer(null); // as this is a cunjunction of searches, we have no result if any word is not known
if ((singleContainer == null) || (singleContainer.size() == 0)) return new indexContainer(null); // as this is a cunjunction of searches, we have no result if any word is not known
// store result in order of result size
map.put(new Long(singleContainer.size() * 1000 + count), singleContainer);
@ -228,7 +228,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
}
// check if there is any result
if (map.size() == 0) return new indexRowSetContainer(null); // no result, nothing found
if (map.size() == 0) return new indexContainer(null); // no result, nothing found
// the map now holds the search results in order of number of hits per word
// we now must pairwise build up a conjunction of these sets
@ -240,14 +240,14 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
time -= (System.currentTimeMillis() - stamp); stamp = System.currentTimeMillis();
searchA = searchResult;
searchB = (indexContainer) map.remove(k);
searchResult = indexRowSetContainer.joinConstructive(searchA, searchB, 2 * time / (map.size() + 1), maxDistance);
searchResult = indexContainer.joinConstructive(searchA, searchB, 2 * time / (map.size() + 1), maxDistance);
// free resources
searchA = null;
searchB = null;
}
// in 'searchResult' is now the combined search result
if (searchResult.size() == 0) return new indexRowSetContainer(null);
if (searchResult.size() == 0) return new indexContainer(null);
return searchResult;
}
@ -260,7 +260,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
public static indexContainer joinConstructive(indexContainer i1, indexContainer i2, long time, int maxDistance) {
if ((i1 == null) || (i2 == null)) return null;
if ((i1.size() == 0) || (i2.size() == 0)) return new indexRowSetContainer(null);
if ((i1.size() == 0) || (i2.size() == 0)) return new indexContainer(null);
// decide which method to use
int high = ((i1.size() > i2.size()) ? i1.size() : i2.size());
@ -281,7 +281,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
private static indexContainer joinConstructiveByTest(indexContainer small, indexContainer large, long time, int maxDistance) {
System.out.println("DEBUG: JOIN METHOD BY TEST");
indexContainer conj = new indexRowSetContainer(null); // start with empty search result
indexContainer conj = new indexContainer(null); // start with empty search result
Iterator se = small.entries();
indexEntry ie0, ie1;
long stamp = System.currentTimeMillis();
@ -299,7 +299,7 @@ public class indexRowSetContainer extends kelondroRowSet implements indexContain
private static indexContainer joinConstructiveByEnumeration(indexContainer i1, indexContainer i2, long time, int maxDistance) {
System.out.println("DEBUG: JOIN METHOD BY ENUMERATION");
indexContainer conj = new indexRowSetContainer(null); // start with empty search result
indexContainer conj = new indexContainer(null); // start with empty search result
if (!((i1.order().signature().equals(i2.order().signature())) &&
(i1.orderColumn() == i2.orderColumn()))) return conj; // ordering must be equal
Iterator e1 = i1.entries();

@ -474,7 +474,7 @@ public final class indexRAMCacheRI extends indexAbstractRI implements indexRI {
// put container into wCache
String wordHash = container.getWordHash();
indexContainer entries = (indexContainer) wCache.get(wordHash); // null pointer exception? wordhash != null! must be cache==null
if (entries == null) entries = new indexRowSetContainer(wordHash);
if (entries == null) entries = new indexContainer(wordHash);
added = entries.add(container, -1);
if (added > 0) {
wCache.put(wordHash, entries);
@ -489,7 +489,7 @@ public final class indexRAMCacheRI extends indexAbstractRI implements indexRI {
public indexContainer addEntry(String wordHash, indexEntry newEntry, long updateTime, boolean dhtCase) {
if (dhtCase) synchronized (kCache) {
// put container into kCache
indexContainer container = new indexRowSetContainer(wordHash);
indexContainer container = new indexContainer(wordHash);
container.add(newEntry);
kCache.put(new Long(updateTime + kCacheInc), container);
kCacheInc++;
@ -497,7 +497,7 @@ public final class indexRAMCacheRI extends indexAbstractRI implements indexRI {
return null;
} else synchronized (wCache) {
indexContainer container = (indexContainer) wCache.get(wordHash);
if (container == null) container = new indexRowSetContainer(wordHash);
if (container == null) container = new indexContainer(wordHash);
indexEntry[] entries = new indexEntry[] { newEntry };
if (container.add(entries, updateTime) > 0) {
wCache.put(wordHash, container);

@ -639,16 +639,17 @@ public class indexURL {
}
public static final serverByteBuffer compressIndex(indexContainer container, long maxtime) {
public static final serverByteBuffer compressIndex(indexContainer inputContainer, indexContainer excludeContainer, long maxtime) {
// collect references according to domains
long timeout = (maxtime < 0) ? Long.MAX_VALUE : System.currentTimeMillis() + maxtime;
TreeMap doms = new TreeMap();
synchronized(container) {
Iterator i = container.entries();
synchronized(inputContainer) {
Iterator i = inputContainer.entries();
indexEntry iEntry;
String dom, paths;
while (i.hasNext()) {
iEntry = (indexEntry) i.next();
if (excludeContainer.get(iEntry.urlHash()) != null) continue; // do not include urls that are in excludeContainer
dom = iEntry.urlHash().substring(6);
if ((paths = (String) doms.get(dom)) == null) {
doms.put(dom, iEntry.urlHash().substring(0, 6));
@ -659,7 +660,7 @@ public class indexURL {
}
}
// construct a result string
serverByteBuffer bb = new serverByteBuffer(container.size() * 6);
serverByteBuffer bb = new serverByteBuffer(inputContainer.size() * 6);
bb.append('{');
Iterator i = doms.entrySet().iterator();
Map.Entry entry;

@ -45,8 +45,11 @@ import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.IOException;
import java.io.FileInputStream;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
@ -81,6 +84,49 @@ public class kelondroMSetTools {
// - join by pairvise enumeration
// - join by iterative tests (where we distinguish left-right and right-left tests)
public static TreeMap joinConstructive(Collection maps) {
// Joins all TreeMap(s) contained in 'maps' into their conjunction: the
// result contains only keys that occur in every input map. Because this is
// an intersection, a null or empty operand short-circuits to an empty result.
// (Values in the returned map come from the pairwise join helper;
// NOTE(review): which operand's values survive depends on
// joinConstructiveByTestSetInMap -- confirm there if value identity matters.)
// first order entities by their size (ascending), so the pairwise join
// below always starts from the smallest intermediate result
TreeMap orderMap = new TreeMap();
TreeMap singleMap;
Iterator i = maps.iterator();
int count = 0;
while (i.hasNext()) {
// get next entity:
singleMap = (TreeMap) i.next();
// check result: any missing/empty operand makes the whole conjunction empty
if ((singleMap == null) || (singleMap.size() == 0)) return new TreeMap();
// store result in order of result size; 'count' disambiguates maps of
// equal size. NOTE(review): with 1000 or more input maps the key
// size*1000+count can collide and silently drop an operand -- confirm
// callers never pass that many maps.
orderMap.put(new Long(singleMap.size() * 1000 + count), singleMap);
count++;
}
// check if there is any result (an empty 'maps' collection ends up here)
if (orderMap.size() == 0) return new TreeMap();
// we now must pairwise build up a conjunction of these maps
Long k = (Long) orderMap.firstKey(); // the smallest, which means, the one with the least entries
TreeMap mapA, mapB, joinResult = (TreeMap) orderMap.remove(k);
while ((orderMap.size() > 0) && (joinResult.size() > 0)) {
// take the next-smallest map and intersect it with the running result;
// iterating the smaller key set against the larger map keeps this cheap
k = (Long) orderMap.firstKey(); // the next smallest...
mapA = joinResult;
mapB = (TreeMap) orderMap.remove(k);
joinResult = joinConstructiveByTestSetInMap(mapB, mapA.keySet());
// free resources
mapA = null;
mapB = null;
}
// 'joinResult' now holds the combined (intersected) search result
if (joinResult.size() == 0) return new TreeMap();
return joinResult;
}
public static TreeMap joinConstructive(TreeMap map, TreeSet set) {
// comparators must be equal
if ((map == null) || (set == null)) return null;
@ -95,13 +141,13 @@ public class kelondroMSetTools {
// start most efficient method
if (stepsEnum > stepsTest) {
if (map.size() < set.size()) return joinConstructiveByTestSetInMap(map, set);
if (map.size() > set.size()) return joinConstructiveByTestSetInMap(map, set);
return joinConstructiveByTestMapInSet(map, set);
}
return joinConstructiveByEnumeration(map, set);
}
private static TreeMap joinConstructiveByTestSetInMap(TreeMap map, TreeSet set) {
private static TreeMap joinConstructiveByTestSetInMap(TreeMap map, Set set) {
Iterator si = set.iterator();
TreeMap result = new TreeMap(map.comparator());
Object o;
@ -112,9 +158,9 @@ public class kelondroMSetTools {
return result;
}
private static TreeMap joinConstructiveByTestMapInSet(TreeMap map, TreeSet set) {
private static TreeMap joinConstructiveByTestMapInSet(Map map, TreeSet set) {
Iterator mi = map.keySet().iterator();
TreeMap result = new TreeMap(map.comparator());
TreeMap result = new TreeMap(set.comparator());
Object o;
while (mi.hasNext()) {
o = mi.next();

@ -50,11 +50,11 @@ import java.util.Set;
import java.util.TreeMap;
import de.anomic.kelondro.kelondroException;
import de.anomic.kelondro.kelondroMSetTools;
import de.anomic.server.logging.serverLog;
import de.anomic.yacy.yacySearch;
import de.anomic.index.indexContainer;
import de.anomic.index.indexEntry;
import de.anomic.index.indexRowSetContainer;
public final class plasmaSearchEvent extends Thread implements Runnable {
@ -90,7 +90,7 @@ public final class plasmaSearchEvent extends Thread implements Runnable {
this.ranking = ranking;
this.urlStore = urlStore;
this.snippetCache = snippetCache;
this.rcContainers = new indexRowSetContainer(null);
this.rcContainers = new indexContainer(null);
this.rcContainerCount = 0;
this.rcAbstracts = new TreeMap();
this.profileLocal = localTiming;
@ -148,13 +148,26 @@ public final class plasmaSearchEvent extends Thread implements Runnable {
try {Thread.sleep(100);} catch (InterruptedException e) {}
}
System.out.println("DEBUG-INDEXABSTRACT: " + rcAbstracts.size() + " word references catched, " + query.size() + " needed");
/*
Iterator i = rcAbstracts.entrySet().iterator();
Map.Entry entry;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
System.out.println("DEBUG-INDEXABSTRACT: hash " + (String) entry.getKey() + ": " + ((query.queryHashes.contains((String) entry.getKey())) ? "NEEDED" : "NOT NEEDED") + "; " + ((TreeMap) entry.getValue()).size() + " entries");
}
*/
TreeMap abstractJoin = (rcAbstracts.size() == query.size()) ? kelondroMSetTools.joinConstructive(rcAbstracts.values()) : new TreeMap();
if (abstractJoin.size() == 0) {
System.out.println("DEBUG-INDEXABSTRACT: no success using index abstracts from remote peers");
} else {
System.out.println("DEBUG-INDEXABSTRACT: index abstracts delivered " + abstractJoin.size() + " additional results for secondary search");
Iterator i = abstractJoin.entrySet().iterator();
Map.Entry entry;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
System.out.println("DEBUG-INDEXABSTRACT: url " + (String) entry.getKey() + ": from peers " + (String) entry.getValue());
}
}
// catch up global results:
// wait until primary timeout passed
@ -216,12 +229,12 @@ public final class plasmaSearchEvent extends Thread implements Runnable {
// since this is a conjunction we return an empty entity if any word is not known
if (containers == null) {
return new indexRowSetContainer(null);
return new indexContainer(null);
}
// join the result
profileLocal.startTimer();
indexContainer rcLocal = indexRowSetContainer.joinContainer(containers,
indexContainer rcLocal = indexContainer.joinContainer(containers,
profileLocal.getTargetTime(plasmaSearchTimingProfile.PROCESS_JOIN),
query.maxDistance);
profileLocal.setYieldTime(plasmaSearchTimingProfile.PROCESS_JOIN);
@ -234,7 +247,7 @@ public final class plasmaSearchEvent extends Thread implements Runnable {
// we collect the urlhashes and construct a list with urlEntry objects
// attention: if minEntries is too high, this method will not terminate within the maxTime
indexContainer searchResult = new indexRowSetContainer(null);
indexContainer searchResult = new indexContainer(null);
long preorderTime = profileLocal.getTargetTime(plasmaSearchTimingProfile.PROCESS_PRESORT);
profileLocal.startTimer();

@ -136,7 +136,6 @@ import de.anomic.http.httpc;
import de.anomic.index.indexContainer;
import de.anomic.index.indexEntry;
import de.anomic.index.indexEntryAttribute;
import de.anomic.index.indexRowSetContainer;
import de.anomic.index.indexURL;
import de.anomic.index.indexURLEntry;
import de.anomic.kelondro.kelondroBase64Order;
@ -1638,7 +1637,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
String word = (String) wentry.getKey();
wordStat = (plasmaCondenser.wordStatProp) wentry.getValue();
String wordHash = indexEntryAttribute.word2hash(word);
indexContainer wordIdxContainer = new indexRowSetContainer(wordHash);
indexContainer wordIdxContainer = new indexContainer(wordHash);
indexEntry wordIdxEntry = new indexURLEntry(
urlHash,
urlLength, urlComps,

@ -68,7 +68,6 @@ import de.anomic.index.indexEntryAttribute;
import de.anomic.index.indexRAMCacheRI;
import de.anomic.index.indexRI;
import de.anomic.index.indexAbstractRI;
import de.anomic.index.indexRowSetContainer;
import de.anomic.index.indexURLEntry;
import de.anomic.kelondro.kelondroBase64Order;
import de.anomic.kelondro.kelondroException;
@ -426,7 +425,7 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
public indexContainer deleteContainer(String wordHash) {
indexContainer c = ramCache.deleteContainer(wordHash);
if (c == null) c = new indexRowSetContainer(wordHash);
if (c == null) c = new indexContainer(wordHash);
if (useCollectionIndex) c.add(collections.deleteContainer(wordHash), -1);
c.add(assortmentCluster.deleteContainer(wordHash), -1);
c.add(backend.deleteContainer(wordHash), -1);
@ -509,7 +508,7 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
ramCache.wordContainers(startWordHash, false),
collections.wordContainers(startWordHash, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true);
}
if (resourceLevel == plasmaWordIndex.RL_ASSORTMENTS) {
@ -519,18 +518,18 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
ramCache.wordContainers(startWordHash, false),
collections.wordContainers(startWordHash, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true),
assortmentCluster.wordContainers(startWordHash, true, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true);
} else {
return new kelondroMergeIterator(
ramCache.wordContainers(startWordHash, false),
assortmentCluster.wordContainers(startWordHash, true, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true);
}
}
@ -542,15 +541,15 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
ramCache.wordContainers(startWordHash, false),
collections.wordContainers(startWordHash, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true),
assortmentCluster.wordContainers(startWordHash, true, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true),
backend.wordContainers(startWordHash, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true);
} else {
return new kelondroMergeIterator(
@ -558,11 +557,11 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
ramCache.wordContainers(startWordHash, false),
assortmentCluster.wordContainers(startWordHash, true, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true),
backend.wordContainers(startWordHash, false),
new indexContainerOrder(kelondroNaturalOrder.naturalOrder),
indexRowSetContainer.containerMergeMethod,
indexContainer.containerMergeMethod,
true);
}
}
@ -661,7 +660,7 @@ public final class plasmaWordIndex extends indexAbstractRI implements indexRI {
try {
entity = new plasmaWordIndexFile(oldDatabaseRoot, wordhash, true);
int size = entity.size();
indexContainer container = new indexRowSetContainer(wordhash);
indexContainer container = new indexContainer(wordhash);
try {
Iterator entries = entity.elements(true);

@ -59,7 +59,6 @@ import java.util.Iterator;
import de.anomic.index.indexContainer;
import de.anomic.index.indexEntry;
import de.anomic.index.indexEntryAttribute;
import de.anomic.index.indexRowSetContainer;
import de.anomic.index.indexURLEntry;
import de.anomic.kelondro.kelondroColumn;
import de.anomic.kelondro.kelondroException;
@ -224,7 +223,7 @@ public final class plasmaWordIndexAssortment {
if (row == null) return null;
String wordHash = row.getColString(0, null);
final long updateTime = row.getColLong(2);
indexContainer container = new indexRowSetContainer(wordHash);
indexContainer container = new indexContainer(wordHash);
int al = assortmentCapacity(row.objectsize());
for (int i = 0; i < al; i++) {
container.add(

@ -57,7 +57,6 @@ import de.anomic.index.indexContainerOrder;
import de.anomic.index.indexEntry;
import de.anomic.index.indexRI;
import de.anomic.index.indexAbstractRI;
import de.anomic.index.indexRowSetContainer;
import de.anomic.kelondro.kelondroNaturalOrder;
import de.anomic.kelondro.kelondroObjectCache;
import de.anomic.kelondro.kelondroRecords;
@ -162,7 +161,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
indexContainer c;
Iterator i = newContainer.entries();
for (int j = clusterStart; j >= 1; j--) {
c = new indexRowSetContainer(newContainer.getWordHash());
c = new indexContainer(newContainer.getWordHash());
for (int k = 0; k < j; k++) {
if (i.hasNext()) {
c.add((indexEntry) i.next(), newContainer.updated());
@ -205,7 +204,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
Iterator i = newContainer.entries();
for (int j = testsize - 1; j >= 0; j--) {
if (spaces[j] == 0) continue;
c = new indexRowSetContainer(newContainer.getWordHash());
c = new indexContainer(newContainer.getWordHash());
for (int k = 0; k <= j; k++) {
assert (i.hasNext());
c.add((indexEntry) i.next(), newContainer.updated());
@ -231,7 +230,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
public indexContainer deleteContainer(String wordHash, long maxTime) {
// removes all records from all the assortments and return them
indexContainer buffer, record = new indexRowSetContainer(wordHash);
indexContainer buffer, record = new indexContainer(wordHash);
long limitTime = (maxTime < 0) ? Long.MAX_VALUE : System.currentTimeMillis() + maxTime;
long remainingTime;
for (int i = 0; i < clusterCount; i++) {
@ -256,7 +255,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
*/
public boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {
indexContainer buffer, record = new indexRowSetContainer(wordHash);
indexContainer buffer, record = new indexContainer(wordHash);
boolean found = false;
for (int i = 0; i < clusterCount; i++) {
buffer = assortments[i].remove(wordHash);
@ -272,7 +271,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
}
public int removeEntries(String wordHash, Set urlHashes, boolean deleteComplete) {
indexContainer buffer, record = new indexRowSetContainer(wordHash);
indexContainer buffer, record = new indexContainer(wordHash);
int initialSize = urlHashes.size();
for (int i = 0; i < clusterCount; i++) {
buffer = assortments[i].remove(wordHash);
@ -297,7 +296,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
public indexContainer getContainer(String wordHash, Set urlselection, boolean deleteIfEmpty, long maxTime) {
// collect all records from all the assortments and return them
indexContainer buffer, record = new indexRowSetContainer(wordHash);
indexContainer buffer, record = new indexContainer(wordHash);
long timeout = (maxTime < 0) ? Long.MAX_VALUE : System.currentTimeMillis() + maxTime;
for (int i = 0; i < clusterCount; i++) {
buffer = assortments[i].get(wordHash);
@ -330,7 +329,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
// iterates indexContainer - Objects
HashSet containerIterators = new HashSet();
for (int i = 0; i < clusterCount; i++) containerIterators.add(assortments[i].containers(startWordHash, up, rot));
return kelondroMergeIterator.cascade(containerIterators, new indexContainerOrder(kelondroNaturalOrder.naturalOrder), indexRowSetContainer.containerMergeMethod, up);
return kelondroMergeIterator.cascade(containerIterators, new indexContainerOrder(kelondroNaturalOrder.naturalOrder), indexContainer.containerMergeMethod, up);
}
public int size() {

@ -54,7 +54,6 @@ import de.anomic.index.indexContainer;
import de.anomic.index.indexEntry;
import de.anomic.index.indexRI;
import de.anomic.index.indexAbstractRI;
import de.anomic.index.indexRowSetContainer;
import de.anomic.kelondro.kelondroNaturalOrder;
import de.anomic.server.logging.serverLog;
import de.anomic.yacy.yacySeedDB;
@ -230,7 +229,7 @@ public class plasmaWordIndexFileCluster extends indexAbstractRI implements index
if ((maxTime < 0) || (maxTime > 60000)) maxTime=60000; // maximum is one minute
if (plasmaWordIndexFile.wordHash2path(databaseRoot, wordHash).exists()) {
plasmaWordIndexFile entity = this.getEntity(wordHash, deleteIfEmpty, (maxTime < 0) ? -1 : maxTime * 9 / 10);
indexContainer container = new indexRowSetContainer(wordHash);
indexContainer container = new indexContainer(wordHash);
indexEntry entry;
Iterator i = entity.elements(true);
while ((i.hasNext()) && (System.currentTimeMillis() < (start + maxTime))) {
@ -239,7 +238,7 @@ public class plasmaWordIndexFileCluster extends indexAbstractRI implements index
}
return container;
} else {
return new indexRowSetContainer(wordHash);
return new indexContainer(wordHash);
}
}
@ -254,7 +253,7 @@ public class plasmaWordIndexFileCluster extends indexAbstractRI implements index
public indexContainer deleteContainer(String wordHash) {
plasmaWordIndexFile.removePlasmaIndex(databaseRoot, wordHash);
return new indexRowSetContainer(wordHash);
return new indexContainer(wordHash);
}
public boolean removeEntry(String wordHash, String urlHash, boolean deleteComplete) {

@ -57,7 +57,6 @@ import de.anomic.http.httpc;
import de.anomic.index.indexContainer;
import de.anomic.index.indexEntry;
import de.anomic.index.indexEntryAttribute;
import de.anomic.index.indexRowSetContainer;
import de.anomic.index.indexURL;
import de.anomic.index.indexURLEntry;
import de.anomic.kelondro.kelondroBase64Order;
@ -479,7 +478,7 @@ public final class yacyClient {
final int words = wordhashes.length() / indexEntryAttribute.wordHashLength;
indexContainer[] container = new indexContainer[words];
for (int i = 0; i < words; i++) {
container[i] = new indexRowSetContainer(wordhashes.substring(i * indexEntryAttribute.wordHashLength, (i + 1) * indexEntryAttribute.wordHashLength));
container[i] = new indexContainer(wordhashes.substring(i * indexEntryAttribute.wordHashLength, (i + 1) * indexEntryAttribute.wordHashLength));
}
// insert results to containers
@ -546,7 +545,7 @@ public final class yacyClient {
singleAbstract = (TreeMap) abstractCache.get(wordhash); // a mapping from url-hashes to a string of peer-hashes
if (singleAbstract == null) singleAbstract = new TreeMap();
ci = new serverByteBuffer(((String) entry.getValue()).getBytes());
System.out.println("DEBUG-ABSTRACTFETCH: for word hash " + wordhash + " received " + ci.toString());
//System.out.println("DEBUG-ABSTRACTFETCH: for word hash " + wordhash + " received " + ci.toString());
indexURL.decompressIndex(singleAbstract, ci, targetPeer.hash);
abstractCache.put(wordhash, singleAbstract);
}

Loading…
Cancel
Save