enhanced buffered write by combining several chunks into one chunk

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@1206 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 19 years ago
parent 0c762daf4b
commit 3cc02fe749

de/anomic/kelondro/kelondroBufferedIOChunks.java

@@ -45,28 +45,28 @@
 package de.anomic.kelondro;
 
 import java.io.IOException;
-import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.TreeMap;
 
 public final class kelondroBufferedIOChunks extends kelondroAbstractIOChunks implements kelondroIOChunks {
 
     protected kelondroRA ra;
-    private int bufferMaxSize, bufferCurrSize;
+    private long bufferMaxSize, bufferCurrSize;
     private long commitTimeout;
-    private HashMap buffer;
+    private TreeMap buffer;
     private long lastCommit = 0;
 
     private static final int overhead = 40;
 
-    public kelondroBufferedIOChunks(kelondroRA ra, String name, int bufferkb, long commitTimeout) {
+    public kelondroBufferedIOChunks(kelondroRA ra, String name, long buffer, long commitTimeout) {
         this.name = name;
         this.ra = ra;
-        this.bufferMaxSize = 1024 * bufferkb;
+        this.bufferMaxSize = buffer;
         this.bufferCurrSize = 0;
         this.commitTimeout = commitTimeout;
-        this.buffer = new HashMap();
+        this.buffer = new TreeMap();
         this.lastCommit = System.currentTimeMillis();
     }
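
The switch from HashMap to TreeMap above is what makes the chunk combination in the next hunk possible: a TreeMap keyed by file offset hands back its pending chunks in ascending offset order, so chunks that are adjacent on disk appear back to back during a flush. A minimal, self-contained sketch of that difference, using raw pre-generics collection types to match the era of this code (the class name is hypothetical):

import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

public class ChunkOrderDemo {
    public static void main(String[] args) {
        Map hashed = new HashMap();  // iteration order unrelated to file offsets
        Map sorted = new TreeMap();  // iteration order = ascending file offsets
        long[] offsets = {4096L, 0L, 1024L, 3072L};
        for (int k = 0; k < offsets.length; k++) {
            hashed.put(new Long(offsets[k]), new byte[1024]);
            sorted.put(new Long(offsets[k]), new byte[1024]);
        }
        // The sorted keys come out as 0, 1024, 3072, 4096: chunks that touch on
        // disk (0 + 1024 == 1024, 3072 + 1024 == 4096) follow each other directly
        // and can be coalesced into a single seek+write during a flush.
        System.out.println("TreeMap offsets: " + sorted.keySet());
        System.out.println("HashMap offsets: " + hashed.keySet());
    }
}
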
@@ -127,18 +127,39 @@ public final class kelondroBufferedIOChunks extends kelondroAbstractIOChunks implements kelondroIOChunks {
         synchronized (buffer) {
             if (buffer.size() == 0) return;
             Iterator i = buffer.entrySet().iterator();
-            Map.Entry entry;
-            long pos;
-            byte[] b;
+            Map.Entry entry = (Map.Entry) i.next();
+            long lastPos = ((Long) entry.getKey()).longValue();
+            byte[] lastChunk = (byte[]) entry.getValue();
+            long nextPos;
+            byte[] nextChunk, tmpChunk;
             synchronized (this.ra) {
                 while (i.hasNext()) {
                     entry = (Map.Entry) i.next();
-                    pos = ((Long) entry.getKey()).longValue();
-                    b = (byte[]) entry.getValue();
-                    this.ra.seek(pos);
-                    this.ra.write(b);
-                    kelondroObjectSpace.recycle(b);
+                    nextPos = ((Long) entry.getKey()).longValue();
+                    nextChunk = (byte[]) entry.getValue();
+                    if (lastPos + lastChunk.length == nextPos) {
+                        // try to combine the new chunk with the previous chunk
+                        //System.out.println("combining chunks pos0=" + lastPos + ", chunk0.length=" + lastChunk.length + ", pos1=" + nextPos + ", chunk1.length=" + nextChunk.length);
+                        tmpChunk = kelondroObjectSpace.alloc(lastChunk.length + nextChunk.length);
+                        System.arraycopy(lastChunk, 0, tmpChunk, 0, lastChunk.length);
+                        System.arraycopy(nextChunk, 0, tmpChunk, lastChunk.length, nextChunk.length);
+                        kelondroObjectSpace.recycle(lastChunk);
+                        lastChunk = tmpChunk;
+                        tmpChunk = null;
+                        kelondroObjectSpace.recycle(nextChunk);
+                    } else {
+                        // write the last chunk and take nextChunk as lastChunk next time
+                        this.ra.seek(lastPos);
+                        this.ra.write(lastChunk);
+                        kelondroObjectSpace.recycle(lastChunk);
+                        lastPos = nextPos;
+                        lastChunk = nextChunk;
+                    }
                 }
+                // at the end write just the last chunk
+                this.ra.seek(lastPos);
+                this.ra.write(lastChunk);
+                kelondroObjectSpace.recycle(lastChunk);
             }
             buffer.clear();
             bufferCurrSize = 0;
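
Stripped of the kelondro helper classes, the new flush loop above reduces to the following standalone sketch. It uses plain array allocation instead of kelondroObjectSpace.alloc/recycle and writes through a java.io.RandomAccessFile; the class and method names are hypothetical, but the coalescing logic mirrors the diff: adjacent chunks are concatenated, and a pending run is only written out when a gap appears or the map is exhausted.

import java.io.IOException;
import java.io.RandomAccessFile;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;

public class CoalescingFlush {

    // Flush a TreeMap of (Long file offset -> byte[] chunk) to a file,
    // merging runs of adjacent chunks into a single seek+write.
    public static void flush(TreeMap buffer, RandomAccessFile file) throws IOException {
        if (buffer.size() == 0) return;
        Iterator i = buffer.entrySet().iterator();
        Map.Entry entry = (Map.Entry) i.next();
        long lastPos = ((Long) entry.getKey()).longValue();
        byte[] lastChunk = (byte[]) entry.getValue();
        while (i.hasNext()) {
            entry = (Map.Entry) i.next();
            long nextPos = ((Long) entry.getKey()).longValue();
            byte[] nextChunk = (byte[]) entry.getValue();
            if (lastPos + lastChunk.length == nextPos) {
                // the chunks touch on disk: append nextChunk to the pending run
                byte[] tmp = new byte[lastChunk.length + nextChunk.length];
                System.arraycopy(lastChunk, 0, tmp, 0, lastChunk.length);
                System.arraycopy(nextChunk, 0, tmp, lastChunk.length, nextChunk.length);
                lastChunk = tmp;
            } else {
                // a gap: write the pending run and start a new one at nextPos
                file.seek(lastPos);
                file.write(lastChunk);
                lastPos = nextPos;
                lastChunk = nextChunk;
            }
        }
        // write whatever run is still pending
        file.seek(lastPos);
        file.write(lastChunk);
        buffer.clear();
    }
}
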

de/anomic/kelondro/kelondroRecords.java

@@ -209,13 +209,13 @@ public class kelondroRecords {
             kelondroRA raf = new kelondroFileRA(this.filename);
             // kelondroRA raf = new kelondroBufferedRA(new kelondroFileRA(this.filename), 1024, 100);
             // kelondroRA raf = new kelondroNIOFileRA(this.filename, false, 10000);
-            init(raf, ohbytec, ohhandlec, columns, FHandles, txtProps, txtPropWidth);
+            init(raf, ohbytec, ohhandlec, columns, FHandles, txtProps, txtPropWidth, buffersize / 10);
         } catch (IOException e) {
             logFailure("cannot create / " + e.getMessage());
             if (exitOnFail)
                 System.exit(-1);
         }
-        initCache(buffersize);
+        initCache(buffersize / 10 * 9);
     }
 
     public kelondroRecords(kelondroRA ra, long buffersize /* bytes */,
@@ -224,19 +224,19 @@ public class kelondroRecords {
                             boolean exitOnFail) {
         this.filename = null;
         try {
-            init(ra, ohbytec, ohhandlec, columns, FHandles, txtProps, txtPropWidth);
+            init(ra, ohbytec, ohhandlec, columns, FHandles, txtProps, txtPropWidth, buffersize / 10);
         } catch (IOException e) {
             logFailure("cannot create / " + e.getMessage());
             if (exitOnFail) System.exit(-1);
         }
-        initCache(buffersize);
+        initCache(buffersize / 10 * 9);
     }
 
     private void init(kelondroRA ra, short ohbytec, short ohhandlec,
-                      int[] columns, int FHandles, int txtProps, int txtPropWidth) throws IOException {
+                      int[] columns, int FHandles, int txtProps, int txtPropWidth, long writeBufferSize) throws IOException {
 
         // create new Chunked IO
-        this.entryFile = new kelondroBufferedIOChunks(ra, ra.name(), 1024, 8000 + random.nextLong() % 2000);
+        this.entryFile = new kelondroBufferedIOChunks(ra, ra.name(), writeBufferSize, 30000 + random.nextLong() % 30000);
         //this.entryFile = new kelondroRAIOChunks(ra, ra.name());
 
         // store dynamic run-time data
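
Besides passing the write-buffer size through, the constructor call above also changes the commit timeout expression from 8000 + random.nextLong() % 2000 to 30000 + random.nextLong() % 30000. The sketch below only illustrates the arithmetic of that expression (the class name is hypothetical); note that Random.nextLong() can be negative, so the modulo term subtracts as often as it adds and the result lands anywhere between 1 and 59999 milliseconds, centered on 30 seconds.

import java.util.Random;

public class CommitTimeoutDemo {
    public static void main(String[] args) {
        Random random = new Random();
        for (int k = 0; k < 5; k++) {
            // same expression as in the diff: jittered around 30 seconds
            long commitTimeout = 30000 + random.nextLong() % 30000;
            System.out.println("commit timeout: " + commitTimeout + " ms");
        }
    }
}
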
@@ -334,19 +334,19 @@ public class kelondroRecords {
         //kelondroRA raf = new kelondroBufferedRA(new kelondroFileRA(this.filename), 1024, 100);
         //kelondroRA raf = new kelondroCachedRA(new kelondroFileRA(this.filename), 5000000, 1000);
         //kelondroRA raf = new kelondroNIOFileRA(this.filename, (file.length() < 4000000), 10000);
-        init(raf);
-        initCache(buffersize);
+        init(raf, buffersize / 10);
+        initCache(buffersize / 10 * 9);
     }
 
     public kelondroRecords(kelondroRA ra, long buffersize) throws IOException{
         this.filename = null;
-        init(ra);
-        initCache(buffersize);
+        init(ra, buffersize / 10);
+        initCache(buffersize / 10 * 9);
     }
 
-    private void init(kelondroRA ra) throws IOException {
+    private void init(kelondroRA ra, long writeBufferSize) throws IOException {
 
         // read from Chunked IO
-        this.entryFile = new kelondroBufferedIOChunks(ra, ra.name(), 1024, 8000 + random.nextLong() % 2000);
+        this.entryFile = new kelondroBufferedIOChunks(ra, ra.name(), writeBufferSize, 30000 + random.nextLong() % 30000);
         //this.entryFile = new kelondroRAIOChunks(ra, ra.name());
 
         // read dynamic variables that are back-ups of stored values in file;
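
Across all of the kelondroRecords constructors changed above, the single buffersize budget is now split instead of being handed to the cache alone: one tenth feeds the write buffer of the new kelondroBufferedIOChunks and the remaining nine tenths go to initCache. A tiny sketch of that arithmetic, with a hypothetical 10 MB budget and class name:

public class BufferSplitDemo {
    public static void main(String[] args) {
        long buffersize = 10L * 1024 * 1024;      // total budget handed to kelondroRecords
        long writeBufferSize = buffersize / 10;   // one tenth -> kelondroBufferedIOChunks write buffer
        long cacheSize = buffersize / 10 * 9;     // nine tenths -> initCache(...)
        System.out.println("write buffer: " + writeBufferSize + " bytes");
        System.out.println("node cache:   " + cacheSize + " bytes");
    }
}
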
