work towards new indexing database structure

(no effect on current functionality yet)

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@2277 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 19 years ago
parent 92f4cb4d73
commit 671fd9a5c9

@ -42,7 +42,7 @@ public abstract class indexAbstractContainer implements indexContainer {
return updateTime;
}
public String wordHash() {
public String getWordHash() {
return wordHash;
}

@ -1,3 +1,29 @@
// indexCollectionRI.java
// (C) 2006 by Michael Peter Christen; mc@anomic.de, Frankfurt a. M., Germany
// first published 03.07.2006 on http://www.anomic.de
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.index;
import java.io.File;
@ -60,13 +86,24 @@ public class indexCollectionRI extends indexAbstractRI implements indexRI {
}
public indexContainer getContainer(String wordHash, boolean deleteIfEmpty, long maxtime) {
// TODO Auto-generated method stub
return null;
try {
indexRowSetContainer idx = (indexRowSetContainer) collectionIndex.get(wordHash.getBytes());
idx.setWordHash(wordHash);
return idx;
} catch (IOException e) {
e.printStackTrace();
return null;
}
}
public indexContainer deleteContainer(String wordHash) {
// TODO Auto-generated method stub
return null;
indexContainer idx = getContainer(wordHash, true, -1);
try {
collectionIndex.remove(wordHash.getBytes());
} catch (IOException e) {
e.printStackTrace();
}
return idx;
}
public int removeEntries(String wordHash, String[] referenceHashes, boolean deleteComplete) {

@ -34,34 +34,27 @@ import de.anomic.kelondro.kelondroOrder;
public interface indexContainer {
public void setWordHash(String newWordHash);
public void clear();
public int size();
public long updated();
public kelondroOrder order();
public String wordHash();
public void setWordHash(String newWordHash);
public String getWordHash();
public int add(indexEntry entry);
public void setOrdering(kelondroOrder newOrder, int newColumn);
public kelondroOrder getOrdering();
public int getOrderColumn();
public int add(indexEntry entry);
public int add(indexEntry entry, long updateTime);
public int add(indexEntry[] entries, long updateTime);
public int add(indexContainer c, long maxTime);
public boolean contains(String urlHash) ;
public indexEntry get(String urlHash);
public indexEntry[] getEntryArray() ;
public indexEntry remove(String urlHash);
public int removeEntries(String wordHash, String[] urlHashes, boolean deleteComplete);
public Iterator entries();

@ -26,12 +26,15 @@
package de.anomic.index;
import de.anomic.kelondro.kelondroRow;
public interface indexEntry {
public Object clone();
public String toEncodedStringForm();
public byte[] toEncodedByteArrayForm();
public byte[] toEncodedByteArrayForm(); // shall be replaced by toKelondroEntry()
public String toPropertyForm();
public kelondroRow.Entry toKelondroEntry();
public String getUrlHash();
public void combineDistance(indexEntry oe);

@ -116,7 +116,7 @@ public final class indexRAMCacheRI extends indexAbstractRI implements indexRI {
Iterator ci = container.entries();
while (ci.hasNext()) {
wordEntry = (indexURLEntry) ci.next();
row.setCol(0, container.wordHash().getBytes());
row.setCol(0, container.getWordHash().getBytes());
row.setCol(1, kelondroNaturalOrder.encodeLong(container.size(), 4));
row.setCol(2, kelondroNaturalOrder.encodeLong(container.updated(), 8));
row.setCol(3, wordEntry.getUrlHash().getBytes());
@ -410,7 +410,7 @@ public final class indexRAMCacheRI extends indexAbstractRI implements indexRI {
added = container.size();
} else synchronized (wCache) {
// put container into wCache
String wordHash = container.wordHash();
String wordHash = container.getWordHash();
indexTreeMapContainer entries = (indexTreeMapContainer) wCache.get(wordHash); // null pointer exception? wordhash != null! must be cache==null
if (entries == null) entries = new indexTreeMapContainer(wordHash);
added = entries.add(container, -1);

@ -0,0 +1,107 @@
// indexRowSetContainer.java
// (C) 2006 by Michael Peter Christen; mc@anomic.de, Frankfurt a. M., Germany
// first published 04.07.2006 on http://www.anomic.de
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.index;
import java.util.Iterator;
import de.anomic.kelondro.kelondroRow;
import de.anomic.kelondro.kelondroRowSet;
// A word-index container backed by a kelondroRowSet: stores index entries as
// kelondro rows and tags them with the word hash they belong to.
// NOTE(review): introduced as part of the new indexing database structure;
// several indexContainer methods below are still unimplemented stubs.
public class indexRowSetContainer extends kelondroRowSet implements indexContainer {

// hash of the word all entries in this container refer to; set lazily via setWordHash()
private String wordHash;

// rowdef describes the column layout of each stored entry row
public indexRowSetContainer(kelondroRow rowdef) {
super(rowdef);
}

// assign the word hash after construction (e.g. after loading rows from the collection index)
public void setWordHash(String newWordHash) {
this.wordHash = newWordHash;
}

// last modification time; delegated to the superclass write timestamp
// (presumably epoch millis — confirm against kelondroRowSet.lastWrote())
public long updated() {
return super.lastWrote();
}

public String getWordHash() {
return wordHash;
}

// add a single entry, converted to its kelondro row form; always reports 1 added
// NOTE(review): does not check for duplicates — TODO confirm intended semantics
public int add(indexEntry entry) {
this.add(entry.toKelondroEntry());
return 1;
}

// add a single entry and force the container's write timestamp to updateTime
public int add(indexEntry entry, long updateTime) {
this.add(entry);
this.lastTimeWrote = updateTime;
return 1;
}

// add all entries with the same update time; returns the number of entries passed in
// (each add overwrites lastTimeWrote with the same updateTime value)
public int add(indexEntry[] entries, long updateTime) {
for (int i = 0; i < entries.length; i++) this.add(entries[i], updateTime);
return entries.length;
}

// merge another container within a time budget — not yet implemented, reports 0 added
public int add(indexContainer c, long maxTime) {
// TODO Auto-generated method stub
return 0;
}

// membership test by URL hash — not yet implemented, always false
public boolean contains(String urlHash) {
// TODO Auto-generated method stub
return false;
}

// lookup by URL hash — not yet implemented, always null
public indexEntry get(String urlHash) {
// TODO Auto-generated method stub
return null;
}

// snapshot of all entries — not yet implemented, always null
public indexEntry[] getEntryArray() {
// TODO Auto-generated method stub
return null;
}

// remove and return the entry for urlHash — not yet implemented, always null
public indexEntry remove(String urlHash) {
// TODO Auto-generated method stub
return null;
}

// bulk removal — not yet implemented, reports 0 removed
public int removeEntries(String wordHash, String[] urlHashes, boolean deleteComplete) {
// TODO Auto-generated method stub
return 0;
}

// iterator over contained entries — not yet implemented, returns null
// NOTE(review): callers iterating a fresh container would NPE here — stub only
public Iterator entries() {
// TODO Auto-generated method stub
return null;
}

}

@ -51,15 +51,17 @@ public final class indexTreeMapContainer extends indexAbstractContainer implemen
private final TreeMap container; // urlHash/plasmaWordIndexEntry - Mapping
private long updateTime;
private kelondroOrder ordering;
private int order_column;
public indexTreeMapContainer(String wordHash) {
this(wordHash, new kelondroNaturalOrder(true));
this(wordHash, new kelondroNaturalOrder(true), 0);
}
public indexTreeMapContainer(String wordHash, kelondroOrder ordering) {
public indexTreeMapContainer(String wordHash, kelondroOrder ordering, int column) {
this.wordHash = wordHash;
this.updateTime = 0;
this.ordering = ordering;
this.order_column = column;
container = new TreeMap(ordering); // a urlhash/plasmaWordIndexEntry - relation
}
@ -80,11 +82,20 @@ public final class indexTreeMapContainer extends indexAbstractContainer implemen
return updateTime;
}
public kelondroOrder order() {
return ordering;
public void setOrdering(kelondroOrder newOrder, int newColumn) {
this.ordering = newOrder;
this.order_column = newColumn;
}
public String wordHash() {
public kelondroOrder getOrdering() {
return this.ordering;
}
public int getOrderColumn() {
return this.order_column;
}
public String getWordHash() {
return wordHash;
}
@ -261,7 +272,8 @@ public final class indexTreeMapContainer extends indexAbstractContainer implemen
private static indexContainer joinConstructiveByEnumeration(indexContainer i1, indexContainer i2, long time, int maxDistance) {
System.out.println("DEBUG: JOIN METHOD BY ENUMERATION");
indexTreeMapContainer conj = new indexTreeMapContainer(null); // start with empty search result
if (!(i1.order().signature().equals(i2.order().signature()))) return conj; // ordering must be equal
if (!((i1.getOrdering().signature().equals(i2.getOrdering().signature())) &&
(i1.getOrderColumn() == i2.getOrderColumn()))) return conj; // ordering must be equal
Iterator e1 = i1.entries();
Iterator e2 = i2.entries();
int c;
@ -273,7 +285,7 @@ public final class indexTreeMapContainer extends indexAbstractContainer implemen
long stamp = System.currentTimeMillis();
while ((System.currentTimeMillis() - stamp) < time) {
c = i1.order().compare(ie1.getUrlHash(), ie2.getUrlHash());
c = i1.getOrdering().compare(ie1.getUrlHash(), ie2.getUrlHash());
//System.out.println("** '" + ie1.getUrlHash() + "'.compareTo('" + ie2.getUrlHash() + "')="+c);
if (c < 0) {
if (e1.hasNext()) ie1 = (indexURLEntry) e1.next(); else break;

@ -39,6 +39,7 @@ import de.anomic.index.indexURL;
import de.anomic.kelondro.kelondroBase64Order;
import de.anomic.kelondro.kelondroColumn;
import de.anomic.kelondro.kelondroRow;
import de.anomic.kelondro.kelondroRow.Entry;
import de.anomic.plasma.plasmaWordIndex;
public final class indexURLEntry extends indexAbstractEntry implements Cloneable, indexEntry {
@ -177,6 +178,11 @@ public final class indexURLEntry extends indexAbstractEntry implements Cloneable
public byte[] toEncodedByteArrayForm() {
return toEncodedStringForm().getBytes();
}
public Entry toKelondroEntry() {
kelondroRow.Entry entry = urlEntryRow.newEntry(toEncodedByteArrayForm());
return entry;
}
public String toPropertyForm() {
StringBuffer str = new StringBuffer(61);

@ -901,14 +901,14 @@ public class kelondroRecords {
} else {
System.out.println("### cache report: " + cacheHeaders.size() + " entries");
Iterator i = cacheHeaders.elements();
byte[] entry;
Iterator i = cacheHeaders.rows();
kelondroRow.Entry entry;
while (i.hasNext()) {
entry = (byte[]) i.next();
entry = (kelondroRow.Entry) i.next();
// print from cache
System.out.print("#C ");
printChunk((byte[]) entry);
printChunk(entry);
System.out.println();
// print from file to compare
@ -925,8 +925,9 @@ public class kelondroRecords {
System.out.println("### end report");
}
private void printChunk(byte[] chunk) {
for (int j = 0; j < chunk.length; j++) System.out.print(chunk[j] + ",");
private void printChunk(kelondroRow.Entry chunk) {
for (int j = 0; j < chunk.columns(); j++)
System.out.print(new String(chunk.getColBytes(j)) + ", ");
}
public final kelondroRow row() {

@ -91,11 +91,11 @@ public class kelondroRowBufferedSet extends kelondroRowSet {
}
}
public Iterator elements() {
public Iterator rows() {
synchronized (buffer) {
flush();
}
return super.elements();
return super.rows();
}
public void uniq() {

@ -151,11 +151,11 @@ public class kelondroRowCollection {
synchronized(chunkcache) {
ensureSize(chunkcount + c.size());
}
Iterator i = c.elements();
byte[] b;
Iterator i = c.rows();
kelondroRow.Entry entry;
while (i.hasNext()) {
b = (byte[]) i.next();
add(b, 0, b.length);
entry = (kelondroRow.Entry) i.next();
add(entry);
}
}
@ -193,37 +193,34 @@ public class kelondroRowCollection {
return chunkcount;
}
public Iterator elements() { // iterates byte[] - objects
return new chunkIterator();
public Iterator rows() {
return new rowIterator();
}
public class chunkIterator implements Iterator {
public class rowIterator implements Iterator {
int c = 0;
private int p;
public chunkIterator() {
c = 0;
public rowIterator() {
p = 0;
}
public boolean hasNext() {
return c < chunkcount;
return p < chunkcount;
}
public Object next() {
byte[] chunk = new byte[rowdef.objectsize()];
System.arraycopy(chunkcache, c * rowdef.objectsize(), chunk, 0, rowdef.objectsize());
c++;
return chunk;
return get(p++);
}
public void remove() {
c--;
System.arraycopy(chunkcache, (c + 1) * rowdef.objectsize(), chunkcache, c * rowdef.objectsize(), (chunkcount - c - 1) * rowdef.objectsize());
p--;
System.arraycopy(chunkcache, (p + 1) * rowdef.objectsize(), chunkcache, p * rowdef.objectsize(), (chunkcount - p - 1) * rowdef.objectsize());
chunkcount--;
}
}
protected final void sort(kelondroOrder newOrder, int newColumn) {
if ((this.sortOrder == null) ||
(!(this.sortOrder.signature().equals(newOrder.signature()))) ||
@ -355,9 +352,9 @@ public class kelondroRowCollection {
public String toString() {
StringBuffer s = new StringBuffer();
Iterator i = elements();
if (i.hasNext()) s.append(new String((byte[]) i.next()).trim());
while (i.hasNext()) s.append(", " + new String((byte[]) i.next()).trim());
Iterator i = rows();
if (i.hasNext()) s.append(((kelondroRow.Entry) i.next()).toString());
while (i.hasNext()) s.append(", " + ((kelondroRow.Entry) i.next()).toString());
return new String(s);
}

@ -188,19 +188,15 @@ public class kelondroRowSet extends kelondroRowCollection implements kelondroInd
public void removeMarkedAll(kelondroRowCollection c) {
long handle = profile.startDelete();
Iterator i = c.elements();
byte[] b;
Iterator i = c.rows();
kelondroRow.Entry entry;
while (i.hasNext()) {
b = (byte[]) i.next();
removeMarked(b, 0, b.length);
entry = (kelondroRow.Entry) i.next();
removeMarked(entry.bytes(), 0, entry.bytes().length);
}
profile.stopDelete(handle);
}
public kelondroOrder getOrdering() {
return this.sortOrder;
}
public void setOrdering(kelondroOrder newOrder, int newColumn) {
if ((this.sortOrder == null) ||
(!(this.sortOrder.signature().equals(newOrder.signature()))) ||
@ -211,6 +207,14 @@ public class kelondroRowSet extends kelondroRowCollection implements kelondroInd
}
}
public kelondroOrder getOrdering() {
return this.sortOrder;
}
public int getOrderColumn() {
return this.sortColumn;
}
private int find(byte[] a, int astart, int alength) {
// returns the chunknumber; -1 if not found
@ -301,6 +305,11 @@ public class kelondroRowSet extends kelondroRowCollection implements kelondroInd
return profile;
}
public Iterator rows() {
shape();
return super.rows();
}
public Iterator rows(boolean up, boolean rotating, byte[] firstKey) throws IOException {
return new rowIterator(up, rotating, firstKey);
}

@ -199,7 +199,7 @@ public class plasmaDHTChunk {
double maximumDistance = ((double) peerRedundancy * 2) / ((double) yacyCore.seedDB.sizeConnected());
while ((maxcount > refcount) && (wordHashIterator.hasNext()) && ((nexthash = (String) wordHashIterator.next()) != null) && (nexthash.trim().length() > 0)
&& ((tmpContainers.size() == 0) || (yacyDHTAction.dhtDistance(nexthash, ((indexTreeMapContainer) tmpContainers.get(0)).wordHash()) < maximumDistance))) {
&& ((tmpContainers.size() == 0) || (yacyDHTAction.dhtDistance(nexthash, ((indexTreeMapContainer) tmpContainers.get(0)).getWordHash()) < maximumDistance))) {
// make an on-the-fly entity and insert values
indexContainer = wordIndex.getContainer(nexthash, true, 10000);
int notBoundCounter = 0;
@ -232,7 +232,7 @@ public class plasmaDHTChunk {
}
// use whats left
log.logFine("Selected partial index (" + indexContainer.size() + " from " + wordIndex.indexSize(nexthash) + " URLs, " + notBoundCounter + " not bound) for word " + indexContainer.wordHash());
log.logFine("Selected partial index (" + indexContainer.size() + " from " + wordIndex.indexSize(nexthash) + " URLs, " + notBoundCounter + " not bound) for word " + indexContainer.getWordHash());
tmpContainers.add(indexContainer);
} catch (kelondroException e) {
log.logSevere("plasmaWordIndexDistribution/2: deleted DB for word " + nexthash, e);
@ -281,8 +281,8 @@ public class plasmaDHTChunk {
indexEntry = (indexURLEntry) urlIter.next();
urlHashes[c++] = indexEntry.getUrlHash();
}
count += wordIndex.removeEntries(this.indexContainers[i].wordHash(), urlHashes, true);
log.logFine("Deleted partial index (" + c + " URLs) for word " + this.indexContainers[i].wordHash() + "; " + this.wordIndex.indexSize(indexContainers[i].wordHash()) + " entries left");
count += wordIndex.removeEntries(this.indexContainers[i].getWordHash(), urlHashes, true);
log.logFine("Deleted partial index (" + c + " URLs) for word " + this.indexContainers[i].getWordHash() + "; " + this.wordIndex.indexSize(indexContainers[i].getWordHash()) + " entries left");
this.indexContainers[i] = null;
}
return count;

@ -145,7 +145,7 @@ public class plasmaDHTFlush extends Thread {
plasmaDHTTransfer workerThread = this.worker;
if (workerThread != null) {
return new String[]{"[" + this.oldStartingPointHash + ".." + this.startPointHash + "]",
"[" + workerThread.dhtChunk.firstContainer().wordHash() + ".." + workerThread.dhtChunk.lastContainer().wordHash() + "]"};
"[" + workerThread.dhtChunk.firstContainer().getWordHash() + ".." + workerThread.dhtChunk.lastContainer().getWordHash() + "]"};
}
return new String[]{"[" + this.oldStartingPointHash + ".." + this.startPointHash + "]","[------------..------------]"};
}
@ -189,9 +189,9 @@ public class plasmaDHTFlush extends Thread {
// getting start point for next DHT-selection
this.oldStartingPointHash = this.startPointHash;
this.startPointHash = newDHTChunk.lastContainer().wordHash(); // DHT targets must have greater hashes
this.startPointHash = newDHTChunk.lastContainer().getWordHash(); // DHT targets must have greater hashes
this.log.logInfo("Index selection of " + newDHTChunk.indexCount() + " words [" + newDHTChunk.firstContainer().wordHash() + " .. " + newDHTChunk.lastContainer().wordHash() + "]" +
this.log.logInfo("Index selection of " + newDHTChunk.indexCount() + " words [" + newDHTChunk.firstContainer().getWordHash() + " .. " + newDHTChunk.lastContainer().getWordHash() + "]" +
" in " +
(newDHTChunk.getSelectionTime() / 1000) + " seconds (" +
(1000 * newDHTChunk.indexCount() / (newDHTChunk.getSelectionTime()+1)) + " words/s)");

@ -160,7 +160,7 @@ public class plasmaDHTTransfer extends Thread {
this.payloadSize = ((Integer)result.get("payloadSize")).intValue();
this.log.logInfo("Index transfer of " + this.dhtChunk.indexCount() +
" words [" + this.dhtChunk.firstContainer().wordHash() + " .. " + this.dhtChunk.lastContainer().wordHash() + "]" +
" words [" + this.dhtChunk.firstContainer().getWordHash() + " .. " + this.dhtChunk.lastContainer().getWordHash() + "]" +
" to peer " + this.seed.getName() + ":" + this.seed.hash +
" in " + (this.transferTime / 1000) +
" seconds successful (" + (1000 * this.dhtChunk.indexCount() / (this.transferTime + 1)) +

@ -2172,7 +2172,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
try {
// find a list of DHT-peers
ArrayList seeds = new ArrayList(Arrays.asList(yacyCore.dhtAgent.getDHTTargets(log, peerCount, 10, dhtChunk.firstContainer().wordHash(), dhtChunk.lastContainer().wordHash(), 0.4)));
ArrayList seeds = new ArrayList(Arrays.asList(yacyCore.dhtAgent.getDHTTargets(log, peerCount, 10, dhtChunk.firstContainer().getWordHash(), dhtChunk.lastContainer().getWordHash(), 0.4)));
if (seeds.size() < peerCount) {
log.logWarning("found not enough (" + seeds.size() + ") peers for distribution");
return false;

@ -135,7 +135,7 @@ public final class plasmaWordIndexAssortment {
//log.logDebug("storeAssortment: wordHash=" + wordHash + ", urlHash=" + entry.getUrlHash() + ", time=" + creationTime);
if (newContainer.size() != assortmentLength) throw new RuntimeException("plasmaWordIndexAssortment.store: wrong container size");
kelondroRow.Entry row = assortments.row().newEntry();
row.setCol(0, newContainer.wordHash().getBytes());
row.setCol(0, newContainer.getWordHash().getBytes());
row.setColLongB256(1, 1);
row.setColLongB256(2, newContainer.updated());
Iterator entries = newContainer.entries();

@ -111,7 +111,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
// if storage was successful, NULL is returned.
if (newContainer.size() > clusterCount) return newContainer; // it will not fit
indexContainer buffer;
while ((buffer = assortments[newContainer.size() - 1].remove(newContainer.wordHash())) != null) {
while ((buffer = assortments[newContainer.size() - 1].remove(newContainer.getWordHash())) != null) {
if (newContainer.add(buffer, -1) == 0) return newContainer; // security check; otherwise this loop does not terminate
if (newContainer.size() > clusterCount) return newContainer; // it will not fit
}
@ -153,7 +153,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
indexTreeMapContainer c;
Iterator i = newContainer.entries();
for (int j = clusterStart; j >= 1; j--) {
c = new indexTreeMapContainer(newContainer.wordHash());
c = new indexTreeMapContainer(newContainer.getWordHash());
for (int k = 0; k < j; k++) {
if (i.hasNext()) {
c.add((indexURLEntry) i.next(), newContainer.updated());
@ -183,7 +183,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
int selectedAssortment = testsize - 1;
while (selectedAssortment >= 0) {
if (selectedAssortment + 1 <= need) {
spaces[selectedAssortment] = (assortments[selectedAssortment].get(newContainer.wordHash()) == null) ? (selectedAssortment + 1) : 0;
spaces[selectedAssortment] = (assortments[selectedAssortment].get(newContainer.getWordHash()) == null) ? (selectedAssortment + 1) : 0;
need -= spaces[selectedAssortment];
assert (need >= 0);
if (need == 0) break;
@ -196,7 +196,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
Iterator i = newContainer.entries();
for (int j = testsize - 1; j >= 0; j--) {
if (spaces[j] == 0) continue;
c = new indexTreeMapContainer(newContainer.wordHash());
c = new indexTreeMapContainer(newContainer.getWordHash());
for (int k = 0; k <= j; k++) {
assert (i.hasNext());
c.add((indexURLEntry) i.next(), newContainer.updated());
@ -210,7 +210,7 @@ public final class plasmaWordIndexAssortmentCluster extends indexAbstractRI impl
if (newContainer == null) return null;
// clean up the whole thing and try to insert the container then
newContainer.add(deleteContainer(newContainer.wordHash(), -1), -1);
newContainer.add(deleteContainer(newContainer.getWordHash(), -1), -1);
if (newContainer.size() > clusterCapacity) return newContainer;
storeStretched(newContainer);
return null;

@ -251,7 +251,7 @@ public class plasmaWordIndexFileCluster extends indexAbstractRI implements index
// open file
plasmaWordIndexFile pi = null;
try {
pi = new plasmaWordIndexFile(databaseRoot, container.wordHash(), false);
pi = new plasmaWordIndexFile(databaseRoot, container.getWordHash(), false);
pi.addEntries(container);
// close and return

@ -1001,7 +1001,7 @@ public final class yacyClient {
eenum = indexes[i].entries();
while (eenum.hasNext()) {
entry = (indexURLEntry) eenum.next();
entrypost.append(indexes[i].wordHash())
entrypost.append(indexes[i].getWordHash())
.append(entry.toPropertyForm())
.append(serverCore.crlfString);
indexcount++;

Loading…
Cancel
Save