enhanced database access times by removing unnecessary synchronization.

also added more hacks that resulted from high-volume query testing

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6047 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 16 years ago
parent 5a7dec880e
commit 733385cdd7
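
Review note: the core of this change is to drop method-level synchronized where it serializes every caller for a method's full duration, keeping only narrow synchronized blocks around the statements that actually touch shared state (and switching shared tracker maps to ConcurrentHashMap, see the plasmaSwitchboard hunks below). A minimal sketch of the locking side of that pattern; the class, field, and threshold names are hypothetical, not taken from YaCy:

// sketch only (hypothetical names): replace a fully synchronized method
// with one narrow synchronized block around the shared-state mutation
import java.util.ArrayList;
import java.util.List;

public class NarrowLockExample {
    private List<String> ram = new ArrayList<String>();  // shared in-memory buffer (hypothetical)
    private static final int MAX_RAM_ENTRIES = 1000;     // hypothetical threshold

    // before: private synchronized void cleanCache() { ... } held the lock for the whole method
    private void cleanCache() {
        synchronized (this) {
            if (this.ram.size() >= MAX_RAM_ENTRIES) {
                // "dump" the full buffer and continue with a fresh one
                this.ram = new ArrayList<String>();
            }
        }
        // longer-running follow-up work can proceed here without holding the object lock
    }
}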

@@ -467,8 +467,14 @@ public class yacysearch {
sb.localSearches.add(theQuery);
// update the search tracker
trackerHandles.add(theQuery.handle);
sb.localSearchTracker.put(client, trackerHandles);
try {
trackerHandles.add(theQuery.handle);
if (trackerHandles.size() > 1000) trackerHandles.remove(trackerHandles.first());
sb.localSearchTracker.put(client, trackerHandles);
if (sb.localSearchTracker.size() > 1000) sb.localSearchTracker.remove(sb.localSearchTracker.keys().nextElement());
} catch (Exception e) {
e.printStackTrace();
}
final int totalcount = theSearch.getRankingResult().getLocalResourceSize() + theSearch.getRankingResult().getRemoteResourceSize();
prop.put("num-results_offset", offset);

@@ -480,7 +480,7 @@ public class BLOBArray implements BLOB {
* @return
* @throws IOException
*/
public synchronized Iterable<byte[]> getAll(byte[] key) throws IOException {
public Iterable<byte[]> getAll(byte[] key) throws IOException {
/*
byte[] b;
ArrayList<byte[]> l = new ArrayList<byte[]>(blobs.size());

@@ -276,32 +276,30 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
* cache control methods
*/
private synchronized void cleanCache() {
private void cleanCache() {
// dump the cache if necessary
if (this.ram.size() >= this.maxRamEntries || (this.ram.size() > 3000 && !MemoryControl.request(80L * 1024L * 1024L, false))) {
try {
cacheDump();
} catch (IOException e) {
e.printStackTrace();
}
}
synchronized (this) {
if (this.ram.size() >= this.maxRamEntries || (this.ram.size() > 3000 && !MemoryControl.request(80L * 1024L * 1024L, false))) {
// dump the ram
File dumpFile = this.array.newContainerBLOBFile();
//this.ram.dump(dumpFile, true);
//this.array.mountBLOBContainer(dumpFile);
merger.dump(this.ram, dumpFile, array);
// get a fresh ram cache
this.ram = new ReferenceContainerCache<ReferenceType>(factory, this.array.rowdef(), this.array.ordering());
this.ram.initWriteMode();
}
}
// clean-up the cache
if (this.array.entries() < 50 && (this.lastCleanup + cleanupCycle > System.currentTimeMillis())) return;
//System.out.println("----cleanup check");
this.array.shrink(this.targetFileSize, this.maxFileSize);
this.lastCleanup = System.currentTimeMillis();
}
private synchronized void cacheDump() throws IOException {
// dump the ram
File dumpFile = this.array.newContainerBLOBFile();
//this.ram.dump(dumpFile, true);
//this.array.mountBLOBContainer(dumpFile);
merger.dump(this.ram, dumpFile, array);
// get a fresh ram cache
this.ram = new ReferenceContainerCache<ReferenceType>(factory, this.array.rowdef(), this.array.ordering());
this.ram.initWriteMode();
synchronized (this) {
if (this.array.entries() > 50 || (this.lastCleanup + cleanupCycle < System.currentTimeMillis())) {
//System.out.println("----cleanup check");
this.array.shrink(this.targetFileSize, this.maxFileSize);
this.lastCleanup = System.currentTimeMillis();
}
}
}
public File newContainerBLOBFile() {
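
With cacheDump() folded into cleanCache(), the former single synchronized method becomes two separately acquired synchronized blocks, one for the RAM dump and one for the file shrink, so the object lock is released in between and other threads can interleave. A minimal sketch of that split; the method names dumpBuffer/shrinkFiles are hypothetical:

// illustrative only: one long locked method split into two short critical sections
public class SplitCriticalSections {

    public void maintain() {
        // before: public synchronized void maintain() { dumpBuffer(); shrinkFiles(); }
        synchronized (this) {
            dumpBuffer();   // first short critical section
        }
        // the lock is released here; other threads may read or write in between
        synchronized (this) {
            shrinkFiles();  // second short critical section
        }
    }

    private void dumpBuffer() { /* hypothetical: flush the in-memory cache to disk */ }
    private void shrinkFiles() { /* hypothetical: merge or shrink on-disk container files */ }
}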

@@ -195,7 +195,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
* @return the indexContainer if one exist, null otherwise
* @throws IOException
*/
public synchronized ReferenceContainer<ReferenceType> get(final byte[] termHash) throws IOException {
public ReferenceContainer<ReferenceType> get(final byte[] termHash) throws IOException {
long timeout = System.currentTimeMillis() + 1000;
Iterator<byte[]> entries = this.array.getAll(termHash).iterator();
if (entries == null || !entries.hasNext()) return null;

@@ -109,6 +109,7 @@ import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
import de.anomic.content.DCEntry;
@@ -247,7 +248,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
public ImporterManager dbImportManager;
public ArrayList<plasmaSearchQuery> localSearches; // array of search result properties as HashMaps
public ArrayList<plasmaSearchQuery> remoteSearches; // array of search result properties as HashMaps
public HashMap<String, TreeSet<Long>> localSearchTracker, remoteSearchTracker; // mappings from requesting host to a TreeSet of Long(access time)
public ConcurrentHashMap<String, TreeSet<Long>> localSearchTracker, remoteSearchTracker; // mappings from requesting host to a TreeSet of Long(access time)
public long indexedPages = 0;
public double requestedQueries = 0d;
public double totalQPM = 0d;
@@ -551,8 +552,8 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
this.incomingCookies = new HashMap<String, Object[]>();
// init search history trackers
this.localSearchTracker = new HashMap<String, TreeSet<Long>>(); // String:TreeSet - IP:set of Long(accessTime)
this.remoteSearchTracker = new HashMap<String, TreeSet<Long>>();
this.localSearchTracker = new ConcurrentHashMap<String, TreeSet<Long>>(); // String:TreeSet - IP:set of Long(accessTime)
this.remoteSearchTracker = new ConcurrentHashMap<String, TreeSet<Long>>();
this.localSearches = new ArrayList<plasmaSearchQuery>(); // contains search result properties as HashMaps
this.remoteSearches = new ArrayList<plasmaSearchQuery>();
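
The tracker fields move from HashMap to ConcurrentHashMap because search requests from different clients are typically served by separate threads: a plain HashMap is not safe under concurrent structural modification and can corrupt its internal table, while ConcurrentHashMap tolerates concurrent put/remove without external locking. A small standalone demo of concurrent puts into the swapped-in type (not YaCy code; the client names are made up):

import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;

// illustrative demo: two "request" threads updating one tracker map concurrently
public class ConcurrentTrackerDemo {
    public static void main(String[] args) throws InterruptedException {
        final ConcurrentHashMap<String, TreeSet<Long>> tracker =
                new ConcurrentHashMap<String, TreeSet<Long>>();
        Runnable request = new Runnable() {
            public void run() {
                for (int i = 0; i < 10000; i++) {
                    TreeSet<Long> times = new TreeSet<Long>();
                    times.add(Long.valueOf(System.currentTimeMillis()));
                    // made-up client keys; 100 distinct keys per thread
                    tracker.put(Thread.currentThread().getName() + "-" + (i % 100), times);
                }
            }
        };
        Thread a = new Thread(request, "client-a");
        Thread b = new Thread(request, "client-b");
        a.start(); b.start();
        a.join(); b.join();
        System.out.println("tracked clients: " + tracker.size());  // 200, with no corruption
    }
}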
