changed method of profiling

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@4248 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 17 years ago
parent 76cd6ed6f6
commit b46bcaa5d8
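This commit replaces the per-search serverProfiling instance (startTimer()/yield() pairs passed through plasmaSearchEvent and plasmaSearchRankingProcess) with a static, event-name-keyed history. The hunks below all follow the same call pattern; the fragment here is a minimal sketch assembled from them, not a verbatim excerpt, and assumes the surrounding YaCy context (a query object, a resultCount for the step being timed, and the imports used in the changed files):

long timer = System.currentTimeMillis();
// ... perform one search step, e.g. collect or join index containers ...
serverProfiling.update("SEARCH",
        new plasmaProfiling.searchEvent(query.id(), plasmaSearchEvent.COLLECTION,
                resultCount, System.currentTimeMillis() - timer));

// consumers such as PerformanceSearch_p.java read the recorded events back:
Iterator events = serverProfiling.history("SEARCH");
while (events.hasNext()) {
    serverProfiling.Event event = (serverProfiling.Event) events.next();
    plasmaProfiling.searchEvent search = (plasmaProfiling.searchEvent) event.payload;
    System.out.println(search.processName + ": " + search.duration + " ms, " + search.resultCount + " results");
}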

@ -360,7 +360,7 @@ public class IndexControlRWIs_p {
private static plasmaSearchRankingProcess genSearchresult(serverObjects prop, plasmaSwitchboard sb, String keyhash, kelondroBitfield filter, int sortorder, boolean fetchURLs) {
plasmaSearchQuery query = new plasmaSearchQuery(keyhash, -1, filter);
plasmaSearchRankingProcess ranked = new plasmaSearchRankingProcess(sb.wordIndex, query, null, sb.getRanking(), sortorder, Integer.MAX_VALUE);
plasmaSearchRankingProcess ranked = new plasmaSearchRankingProcess(sb.wordIndex, query, sb.getRanking(), sortorder, Integer.MAX_VALUE);
ranked.execQuery(fetchURLs);
if (ranked.filteredCount() == 0) {

@ -24,90 +24,23 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import java.util.Iterator;
import java.util.Map;
import de.anomic.http.httpHeader;
import de.anomic.plasma.plasmaSwitchboard;
import de.anomic.plasma.plasmaProfiling;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.ymage.ymageChart;
import de.anomic.ymage.ymageMatrix;
public class PerformanceGraph {
public static ymageMatrix respond(httpHeader header, serverObjects post, serverSwitch env) {
plasmaSwitchboard sb = (plasmaSwitchboard) env;
//plasmaSwitchboard sb = (plasmaSwitchboard) env;
if (post == null) post = new serverObjects();
// find maximum values for automatic graph dimension adaptation
Iterator i = sb.ppmHistory.entrySet().iterator();
Map.Entry entry;
int ppm, maxppm = 50;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
ppm = (int) ((Long) entry.getValue()).longValue();
if (ppm > maxppm) maxppm = ppm;
}
i = sb.usedMemoryHistory.entrySet().iterator();
long bytes, maxbytes = 100 * 1024 * 1024;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
bytes = ((Long) entry.getValue()).longValue();
if (bytes > maxbytes) maxbytes = bytes;
}
// declare graph and set dimensions
int height = 240;
int width = 660;
int leftborder = 30;
int rightborder = 30;
int topborder = 20;
int bottomborder = 20;
int vspace = height - topborder - bottomborder;
int hspace = width - leftborder - rightborder;
int maxtime = 600;
ymageChart ip = new ymageChart(width, height, "FFFFFF", "000000", leftborder, rightborder, topborder, bottomborder, "PEER PERFORMANCE GRAPH: PAGES/MINUTE and USED MEMORY");
ip.declareDimension(ymageChart.DIMENSION_BOTTOM, 60, hspace / (maxtime / 60), -maxtime, "000000", "CCCCCC", "TIME/SECONDS");
ip.declareDimension(ymageChart.DIMENSION_LEFT, 20, /*40*/ vspace * 20 / maxppm, 0, "008800", null , "PPM [PAGES/MINUTE]");
ip.declareDimension(ymageChart.DIMENSION_RIGHT, 100, /*20*/ vspace * 100 / (int)(maxbytes / 1024 / 1024), 0, "0000FF", "CCCCCC", "MEMORY/MEGABYTE");
// draw ppm
ip.setColor("008800");
i = sb.ppmHistory.entrySet().iterator();
long time, now = System.currentTimeMillis();
int x0 = 1, x1, y0 = 0, y1;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
time = ((Long) entry.getKey()).longValue() - now;
ppm = (int) ((Long) entry.getValue()).longValue();
//System.out.println("PPM: time = " + time + ", ppm = " + ppm);
x1 = (int) (time/1000);
y1 = ppm;
ip.chartDot(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, x1, y1, 1);
if (x0 < 0) ip.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, x0, y0, x1, y1);
x0 = x1; y0 = y1;
}
// draw memory
ip.setColor("0000FF");
i = sb.usedMemoryHistory.entrySet().iterator();
x0 = 1;
while (i.hasNext()) {
entry = (Map.Entry) i.next();
time = ((Long) entry.getKey()).longValue() - now;
bytes = ((Long) entry.getValue()).longValue();
//System.out.println("Memory: time = " + time + ", bytes = " + bytes);
x1 = (int) (time/1000);
y1 = (int) (bytes / 1024 / 1024);
ip.chartDot(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_RIGHT, x1, y1, 1);
if (x0 < 0) ip.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_RIGHT, x0, y0, x1, y1);
x0 = x1; y0 = y1;
}
int width = post.getInt("width", 660);
int height = post.getInt("height", 240);
return ip;
return plasmaProfiling.performanceGraph(width, height);
}
}

@ -8,6 +8,7 @@
<script type="text/javascript"><!--
function reloadGraph() {
document.images["graph"].src="PerformanceGraph.png?time="+(new Date()).getTime() ;
window.status="";
}
window.setInterval("reloadGraph()", 1000);
//--></script>

@ -14,14 +14,18 @@
</p>
<table border="0" cellpadding="2" cellspacing="1">
<tr class="TableHeader" valign="bottom">
<td>Query</td>
<td>Event</td>
<td>Time (milliseconds)</td>
<td>Time</td>
<td>Duration (milliseconds)</td>
<td>Result-Count</td>
</tr>
#{table}#
<tr class="TableCellDark" valign="bottom">
<td>#[query]#</td>
<td>#[event]#</td>
<td>#[time]#</td>
<td>#[duration]#</td>
<td>#[count]#</td>
</tr>
#{/table}#

@ -24,10 +24,11 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import java.util.Date;
import java.util.Iterator;
import de.anomic.http.httpHeader;
import de.anomic.plasma.plasmaSearchEvent;
import de.anomic.plasma.plasmaProfiling;
import de.anomic.server.serverObjects;
import de.anomic.server.serverProfiling;
import de.anomic.server.serverSwitch;
@ -37,21 +38,19 @@ public class PerformanceSearch_p {
public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch sb) {
// return variable that accumulates replacements
serverObjects prop = new serverObjects();
plasmaSearchEvent se = plasmaSearchEvent.getEvent(plasmaSearchEvent.lastEventID);
if (se == null) {
prop.put("table", "0");
return prop;
}
Iterator events = se.getProcess().events();
Iterator events = serverProfiling.history("SEARCH");
int c = 0;
serverProfiling.Entry event;
serverProfiling.Event event;
plasmaProfiling.searchEvent search;
while (events.hasNext()) {
event = (serverProfiling.Entry) events.next();
prop.put("table_" + c + "_event", event.process);
prop.putNum("table_" + c + "_count", event.count);
prop.putNum("table_" + c + "_time", event.time);
event = (serverProfiling.Event) events.next();
search = (plasmaProfiling.searchEvent) event.payload;
prop.put("table_" + c + "_query", search.queryID);
prop.put("table_" + c + "_event", search.processName);
prop.putNum("table_" + c + "_count", search.resultCount);
prop.put("table_" + c + "_time", (new Date(event.time)).toString());
prop.putNum("table_" + c + "_duration", search.duration);
c++;
}
prop.put("table", c);

@ -40,6 +40,7 @@ import de.anomic.kelondro.kelondroBase64Order;
import de.anomic.kelondro.kelondroBitfield;
import de.anomic.index.indexContainer;
import de.anomic.net.natLib;
import de.anomic.plasma.plasmaProfiling;
import de.anomic.plasma.plasmaSearchEvent;
import de.anomic.plasma.plasmaSearchQuery;
import de.anomic.plasma.plasmaSearchRankingProfile;
@ -136,7 +137,6 @@ public final class search {
int indexabstractContainercount = 0;
int joincount = 0;
plasmaSearchQuery theQuery = null;
serverProfiling localProfiling = null;
ArrayList accu = null;
long urlRetrievalAllTime = 0, snippetComputationAllTime = 0;
if ((query.length() == 0) && (abstractSet != null)) {
@ -145,13 +145,9 @@ public final class search {
theQuery.domType = plasmaSearchQuery.SEARCHDOM_LOCAL;
yacyCore.log.logInfo("INIT HASH SEARCH (abstracts only): " + plasmaSearchQuery.anonymizedQueryHashes(theQuery.queryHashes) + " - " + theQuery.displayResults() + " links");
// prepare a search profile
localProfiling = new serverProfiling();
//theSearch = new plasmaSearchEvent(squery, rankingProfile, localTiming, remoteTiming, true, sb.wordIndex, null);
localProfiling.startTimer();
long timer = System.currentTimeMillis();
Map[] containers = sb.wordIndex.localSearchContainers(theQuery, plasmaSearchQuery.hashes2Set(urls));
localProfiling.yield(plasmaSearchEvent.COLLECTION, containers[0].size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(theQuery.id(), plasmaSearchEvent.COLLECTION, containers[0].size(), System.currentTimeMillis() - timer));
if (containers != null) {
Iterator ci = containers[0].entrySet().iterator();
Map.Entry entry;
@ -178,8 +174,7 @@ public final class search {
// prepare a search profile
plasmaSearchRankingProfile rankingProfile = (profile.length() == 0) ? new plasmaSearchRankingProfile(plasmaSearchQuery.contentdomParser(contentdom)) : new plasmaSearchRankingProfile("", profile);
localProfiling = new serverProfiling();
plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, rankingProfile, localProfiling, sb.wordIndex, null, true, abstractSet);
plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, rankingProfile, sb.wordIndex, null, true, abstractSet);
urlRetrievalAllTime = theSearch.getURLRetrievalTime();
snippetComputationAllTime = theSearch.getSnippetComputationTime();
@ -239,7 +234,7 @@ public final class search {
if (partitions > 0) sb.requestedQueries = sb.requestedQueries + 1d / partitions; // increase query counter
// prepare reference hints
localProfiling.startTimer();
long timer = System.currentTimeMillis();
Set ws = theSearch.references(10);
StringBuffer refstr = new StringBuffer();
Iterator j = ws.iterator();
@ -247,7 +242,7 @@ public final class search {
refstr.append(",").append((String) j.next());
}
prop.put("references", (refstr.length() > 0) ? refstr.substring(1) : refstr.toString());
localProfiling.yield("reference collection", ws.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(theQuery.id(), "reference collection", ws.size(), System.currentTimeMillis() - timer));
}
prop.put("indexabstract", indexabstract.toString());
@ -261,7 +256,7 @@ public final class search {
} else {
// result is a List of urlEntry elements
localProfiling.startTimer();
long timer = System.currentTimeMillis();
StringBuffer links = new StringBuffer();
String resource = null;
plasmaSearchEvent.ResultEntry entry;
@ -274,7 +269,7 @@ public final class search {
}
prop.put("links", links.toString());
prop.put("linkcount", accu.size());
localProfiling.yield("result list preparation", accu.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(theQuery.id(), "result list preparation", accu.size(), System.currentTimeMillis() - timer));
}
// add information about forward peers

@ -64,7 +64,6 @@ import de.anomic.plasma.plasmaSwitchboard;
import de.anomic.server.serverCore;
import de.anomic.server.serverDate;
import de.anomic.server.serverObjects;
import de.anomic.server.serverProfiling;
import de.anomic.server.serverSwitch;
import de.anomic.server.logging.serverLog;
import de.anomic.tools.yFormatter;
@ -268,7 +267,6 @@ public class yacysearch {
20,
constraint,
true);
serverProfiling localProfiling = new serverProfiling();
String client = (String) header.get("CLIENTIP"); // the search client who initiated the search
@ -287,7 +285,7 @@ public class yacysearch {
theQuery.setOffset(0); // in case that this is a new search, always start without a offset
offset = 0;
}
plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, sb.getRanking(), localProfiling, sb.wordIndex, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false, null);
plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, sb.getRanking(), sb.wordIndex, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false, null);
// generate result object
serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER ORDERING OF SEARCH RESULTS: " + ((System.currentTimeMillis() - timestamp) / 1000) + " seconds");

@ -144,7 +144,7 @@ public class plasmaCrawlQueues {
(!sb.getConfig(plasmaSwitchboard.CLUSTER_MODE, "").equals(plasmaSwitchboard.CLUSTER_MODE_PUBLIC_CLUSTER)) &&
(!sb.getConfig(plasmaSwitchboard.CLUSTER_MODE, "").equals(plasmaSwitchboard.CLUSTER_MODE_PRIVATE_CLUSTER)));
if ((robinsonPrivateCase) || ((coreCrawlJobSize() <= 20) && (limitCrawlJobSize() > 0))) {
if (((robinsonPrivateCase) || (coreCrawlJobSize() <= 20)) && (limitCrawlJobSize() > 0)) {
// move some tasks to the core crawl job so we have something to do
int toshift = Math.min(10, limitCrawlJobSize()); // this cannot be a big number because the balancer makes a forced waiting if it cannot balance
for (int i = 0; i < toshift; i++) {
@ -181,7 +181,7 @@ public class plasmaCrawlQueues {
try {
status[plasmaSwitchboard.CRAWLJOB_SYNC].wait();
}
catch (InterruptedException e){ return false;}
catch (InterruptedException e) {return false;}
}
}

@ -0,0 +1,138 @@
// plasmaProfiling.java
// (C) 2007 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 04.12.2007 on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.plasma;
import java.util.Iterator;
import de.anomic.server.serverProfiling;
import de.anomic.yacy.yacyCore;
import de.anomic.ymage.ymageChart;
import de.anomic.ymage.ymageMatrix;
public class plasmaProfiling {
public static long lastPPMUpdate;
static {
// initialize memory profiling
lastPPMUpdate = System.currentTimeMillis() - 30000;
}
public static void updateIndexedPage(plasmaSwitchboardQueue.Entry entry) {
if (System.currentTimeMillis() - lastPPMUpdate > 30000) {
// we don't want to do this too often
yacyCore.peerActions.updateMySeed();
serverProfiling.update("ppm", new Long(yacyCore.seedDB.mySeed().getPPM()));
lastPPMUpdate = System.currentTimeMillis();
}
serverProfiling.update("indexed", entry.url().toNormalform(true, false));
}
public static long maxPayload(String eventname, long min) {
Iterator i = serverProfiling.history(eventname);
serverProfiling.Event event;
long max = min, l;
while (i.hasNext()) {
event = (serverProfiling.Event) i.next();
l = ((Long) event.payload).longValue();
if (l > max) max = l;
}
return max;
}
public static ymageMatrix performanceGraph(int width, int height) {
// find maximum values for automatic graph dimension adaptation
int maxppm = (int) maxPayload("ppm", 25);
long maxbytes = maxPayload("memory", 110 * 1024 * 1024);
// declare graph and set dimensions
int leftborder = 30;
int rightborder = 30;
int topborder = 20;
int bottomborder = 20;
int leftscale = 20;
int rightscale = 100;
int bottomscale = 60;
int vspace = height - topborder - bottomborder;
int hspace = width - leftborder - rightborder;
int maxtime = 600;
ymageChart ip = new ymageChart(width, height, "FFFFFF", "000000", leftborder, rightborder, topborder, bottomborder, "PEER PERFORMANCE GRAPH: PAGES/MINUTE and USED MEMORY");
ip.declareDimension(ymageChart.DIMENSION_BOTTOM, bottomscale, hspace / (maxtime / bottomscale), -maxtime, "000000", "CCCCCC", "TIME/SECONDS");
ip.declareDimension(ymageChart.DIMENSION_LEFT, leftscale, vspace * leftscale / maxppm, 0, "008800", null , "PPM [PAGES/MINUTE]");
ip.declareDimension(ymageChart.DIMENSION_RIGHT, rightscale, vspace * rightscale / (int)(maxbytes / 1024 / 1024), 0, "0000FF", "CCCCCC", "MEMORY/MEGABYTE");
// draw ppm
Iterator i = serverProfiling.history("ppm");
long time, now = System.currentTimeMillis(), bytes;
int x0 = 1, x1, y0 = 0, y1, ppm;
serverProfiling.Event event;
while (i.hasNext()) {
event = (serverProfiling.Event) i.next();
time = event.time - now;
ppm = (int) ((Long) event.payload).longValue();
x1 = (int) (time/1000);
y1 = ppm;
ip.setColor("228822");
ip.chartDot(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, x1, y1, 2);
ip.setColor("008800");
if (x0 < 0) ip.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_LEFT, x0, y0, x1, y1);
x0 = x1; y0 = y1;
}
// draw memory
i = serverProfiling.history("memory");
x0 = 1;
while (i.hasNext()) {
event = (serverProfiling.Event) i.next();
time = event.time - now;
bytes = ((Long) event.payload).longValue();
x1 = (int) (time/1000);
y1 = (int) (bytes / 1024 / 1024);
ip.setColor("AAAAFF");
ip.chartDot(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_RIGHT, x1, y1, 2);
ip.setColor("0000FF");
if (x0 < 0) ip.chartLine(ymageChart.DIMENSION_BOTTOM, ymageChart.DIMENSION_RIGHT, x0, y0, x1, y1);
x0 = x1; y0 = y1;
}
return ip;
}
public static class searchEvent {
public String queryID, processName;
public long duration;
public int resultCount;
public searchEvent(String queryID, String processName, int resultCount, long duration) {
this.queryID = queryID;
this.processName = processName;
this.resultCount = resultCount;
this.duration = duration;
}
}
}
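The two halves of plasmaProfiling.java connect the indexing path to the performance graph. A short usage sketch based on the hunks in this commit (plasmaSwitchboard calls updateIndexedPage(entry) after each indexed page; entry stands in for the plasmaSwitchboardQueue.Entry being processed):

plasmaProfiling.updateIndexedPage(entry);   // samples "ppm" at most every 30 seconds, records the indexed URL on every call
int maxppm = (int) plasmaProfiling.maxPayload("ppm", 25);                 // highest pages/minute seen, floored at 25
long maxbytes = plasmaProfiling.maxPayload("memory", 110 * 1024 * 1024);  // highest used memory seen, floored at 110 MB
// performanceGraph(width, height) uses these maxima to auto-scale its left and right axes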

@ -69,7 +69,6 @@ public final class plasmaSearchEvent {
private plasmaWordIndex wordIndex;
private plasmaSearchRankingProcess rankedCache; // ordered search results, grows dynamically as all the query threads enrich this container
private Map rcAbstracts; // cache for index abstracts; word:TreeMap mapping where the embedded TreeMap is a urlhash:peerlist relation
private serverProfiling process;
private yacySearch[] primarySearchThreads, secondarySearchThreads;
private Thread localSearchThread;
private TreeMap preselectedPeerHashes;
@ -87,7 +86,6 @@ public final class plasmaSearchEvent {
private plasmaSearchEvent(plasmaSearchQuery query,
plasmaSearchRankingProfile ranking,
serverProfiling localTiming,
plasmaWordIndex wordIndex,
TreeMap preselectedPeerHashes,
boolean generateAbstracts,
@ -97,7 +95,6 @@ public final class plasmaSearchEvent {
this.query = query;
this.ranking = ranking;
this.rcAbstracts = (query.queryHashes.size() > 1) ? new TreeMap() : null; // generate abstracts only for combined searches
this.process = localTiming;
this.primarySearchThreads = null;
this.secondarySearchThreads = null;
this.preselectedPeerHashes = preselectedPeerHashes;
@ -125,14 +122,14 @@ public final class plasmaSearchEvent {
if ((query.domType == plasmaSearchQuery.SEARCHDOM_GLOBALDHT) ||
(query.domType == plasmaSearchQuery.SEARCHDOM_CLUSTERALL)) {
// do a global search
this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, process, ranking, 2, max_results_preparation);
this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, ranking, 2, max_results_preparation);
int fetchpeers = (int) (query.maximumTime / 500L); // number of target peers; means 10 peers in 10 seconds
if (fetchpeers > 50) fetchpeers = 50;
if (fetchpeers < 30) fetchpeers = 30;
// the result of the fetch is then in the rcGlobal
process.startTimer();
long timer = System.currentTimeMillis();
serverLog.logFine("SEARCH_EVENT", "STARTING " + fetchpeers + " THREADS TO CATCH EACH " + query.displayResults() + " URLs");
this.primarySearchThreads = yacySearch.primaryRemoteSearches(
plasmaSearchQuery.hashSet2hashString(query.queryHashes),
@ -150,7 +147,7 @@ public final class plasmaSearchEvent {
ranking,
query.constraint,
(query.domType == plasmaSearchQuery.SEARCHDOM_GLOBALDHT) ? null : preselectedPeerHashes);
process.yield("remote search thread start", this.primarySearchThreads.length);
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), "remote search thread start", this.primarySearchThreads.length, System.currentTimeMillis() - timer));
// meanwhile do a local search
localSearchThread = new localSearchProcess();
@ -160,14 +157,14 @@ public final class plasmaSearchEvent {
serverLog.logFine("SEARCH_EVENT", "SEARCH TIME AFTER GLOBAL-TRIGGER TO " + primarySearchThreads.length + " PEERS: " + ((System.currentTimeMillis() - start) / 1000) + " seconds");
} else {
// do a local search
this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, process, ranking, 2, max_results_preparation);
this.rankedCache = new plasmaSearchRankingProcess(wordIndex, query, ranking, 2, max_results_preparation);
this.rankedCache.execQuery(true);
this.localcount = this.rankedCache.filteredCount();
//plasmaWordIndex.Finding finding = wordIndex.retrieveURLs(query, false, 2, ranking, process);
if (generateAbstracts) {
// compute index abstracts
process.startTimer();
long timer = System.currentTimeMillis();
Iterator ci = this.rankedCache.searchContainerMaps()[0].entrySet().iterator();
Map.Entry entry;
int maxcount = -1;
@ -191,7 +188,7 @@ public final class plasmaSearchEvent {
IACount.put(wordhash, new Integer(container.size()));
IAResults.put(wordhash, indexContainer.compressIndex(container, null, 1000).toString());
}
process.yield("abstract generation", this.rankedCache.searchContainerMaps()[0].size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), "abstract generation", this.rankedCache.searchContainerMaps()[0].size(), System.currentTimeMillis() - timer));
}
}
@ -205,7 +202,7 @@ public final class plasmaSearchEvent {
}
} else {
// prepare result vector directly without worker threads
process.startTimer();
long timer = System.currentTimeMillis();
indexURLEntry uentry;
ResultEntry resultEntry;
yacyURL url;
@ -231,7 +228,7 @@ public final class plasmaSearchEvent {
}
}
}
process.yield("offline snippet fetch", resultList.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), "offline snippet fetch", resultList.size(), System.currentTimeMillis() - timer));
}
// clean up events
@ -421,10 +418,6 @@ public final class plasmaSearchEvent {
return ranking;
}
public serverProfiling getProcess() {
return process;
}
public yacySearch[] getPrimarySearchThreads() {
return primarySearchThreads;
}
@ -457,7 +450,6 @@ public final class plasmaSearchEvent {
public static plasmaSearchEvent getEvent(plasmaSearchQuery query,
plasmaSearchRankingProfile ranking,
serverProfiling localTiming,
plasmaWordIndex wordIndex,
TreeMap preselectedPeerHashes,
boolean generateAbstracts,
@ -465,7 +457,7 @@ public final class plasmaSearchEvent {
synchronized (lastEvents) {
plasmaSearchEvent event = (plasmaSearchEvent) lastEvents.get(query.id());
if (event == null) {
event = new plasmaSearchEvent(query, ranking, localTiming, wordIndex, preselectedPeerHashes, generateAbstracts, abstractSet);
event = new plasmaSearchEvent(query, ranking, wordIndex, preselectedPeerHashes, generateAbstracts, abstractSet);
} else {
//re-new the event time for this event, so it is not deleted next time too early
event.eventTime = System.currentTimeMillis();

@ -61,7 +61,6 @@ public final class plasmaSearchRankingProcess {
private int maxentries;
private int globalcount;
private indexRWIEntryOrder order;
private serverProfiling process;
private HashMap urlhashes; // map for double-check; String/Long relation, addresses ranking number (backreference for deletion)
private kelondroMScoreCluster ref; // reference score computation for the commonSense heuristic
private int[] flagcount; // flag counter
@ -69,7 +68,7 @@ public final class plasmaSearchRankingProcess {
private plasmaWordIndex wordIndex;
private Map[] localSearchContainerMaps;
public plasmaSearchRankingProcess(plasmaWordIndex wordIndex, plasmaSearchQuery query, serverProfiling process, plasmaSearchRankingProfile ranking, int sortorder, int maxentries) {
public plasmaSearchRankingProcess(plasmaWordIndex wordIndex, plasmaSearchQuery query, plasmaSearchRankingProfile ranking, int sortorder, int maxentries) {
// we collect the urlhashes and construct a list with urlEntry objects
// attention: if minEntries is too high, this method will not terminate within the maxTime
// sortorder: 0 = hash, 1 = url, 2 = ranking
@ -78,7 +77,6 @@ public final class plasmaSearchRankingProcess {
this.doubleDomCache = new HashMap();
this.handover = new HashMap();
this.filteredCount = 0;
this.process = process;
this.order = null;
this.query = query;
this.ranking = ranking;
@ -95,12 +93,12 @@ public final class plasmaSearchRankingProcess {
public void execQuery(boolean fetchURLs) {
if (process != null) process.startTimer();
long timer = System.currentTimeMillis();
this.localSearchContainerMaps = wordIndex.localSearchContainers(query, null);
if (process != null) process.yield(plasmaSearchEvent.COLLECTION, this.localSearchContainerMaps[0].size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), plasmaSearchEvent.COLLECTION, this.localSearchContainerMaps[0].size(), System.currentTimeMillis() - timer));
// join and exclude the local result
if (process != null) process.startTimer();
timer = System.currentTimeMillis();
indexContainer index =
(this.localSearchContainerMaps == null) ?
plasmaWordIndex.emptyContainer(null, 0) :
@ -108,7 +106,7 @@ public final class plasmaSearchRankingProcess {
this.localSearchContainerMaps[0].values(),
this.localSearchContainerMaps[1].values(),
query.maxDistance);
if (process != null) process.yield(plasmaSearchEvent.JOIN, index.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), plasmaSearchEvent.JOIN, index.size(), System.currentTimeMillis() - timer));
int joincount = index.size();
if ((index == null) || (joincount == 0)) {
@ -169,12 +167,12 @@ public final class plasmaSearchRankingProcess {
assert (container != null);
if (container.size() == 0) return;
if (process != null) process.startTimer();
long timer = System.currentTimeMillis();
if (this.order == null) {
this.order = new indexRWIEntryOrder(ranking);
}
this.order.extend(container);
if (process != null) process.yield(plasmaSearchEvent.NORMALIZING, container.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), plasmaSearchEvent.NORMALIZING, container.size(), System.currentTimeMillis() - timer));
/*
container.setOrdering(o, 0);
@ -182,7 +180,7 @@ public final class plasmaSearchRankingProcess {
*/
// normalize entries and get ranking
if (process != null) process.startTimer();
timer = System.currentTimeMillis();
Iterator i = container.entries();
indexRWIEntry iEntry, l;
long biggestEntry = 0;
@ -235,8 +233,7 @@ public final class plasmaSearchRankingProcess {
//System.out.println("###DEBUG### time to sort " + container.size() + " entries to " + this.filteredCount + ": " + sc + " milliseconds, " + (container.size() / sc) + " entries/millisecond, ranking = " + tc);
//if ((query.neededResults() > 0) && (container.size() > query.neededResults())) remove(true, true);
if (process != null) process.yield(plasmaSearchEvent.PRESORT, container.size());
serverProfiling.update("SEARCH", new plasmaProfiling.searchEvent(query.id(), plasmaSearchEvent.PRESORT, container.size(), System.currentTimeMillis() - timer));
}
private boolean testFlags(indexRWIEntry ientry) {

@ -146,6 +146,7 @@ import de.anomic.server.serverFileUtils;
import de.anomic.server.serverInstantThread;
import de.anomic.server.serverMemory;
import de.anomic.server.serverObjects;
import de.anomic.server.serverProfiling;
import de.anomic.server.serverSemaphore;
import de.anomic.server.serverSwitch;
import de.anomic.server.serverThread;
@ -241,8 +242,6 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
public boolean acceptLocalURLs, acceptGlobalURLs;
public URLLicense licensedURLs;
public Timer moreMemory;
public TreeMap ppmHistory, usedMemoryHistory;
public long lastPPMUpdate;
/*
* Remote Proxy configuration
@ -877,14 +876,9 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
public plasmaSwitchboard(String rootPath, String initPath, String configPath, boolean applyPro) {
super(rootPath, initPath, configPath, applyPro);
serverProfiling.startSystemProfiling();
sb=this;
// initialize memory profiling
ppmHistory = new TreeMap();
usedMemoryHistory = new TreeMap();
lastPPMUpdate = System.currentTimeMillis();
updateProfiling();
// set loglevel and log
setLog(new serverLog("PLASMA"));
if (applyPro) this.log.logInfo("This is the pro-version of YaCy");
@ -1406,29 +1400,6 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
return sb;
}
public void updateProfiling() {
Long time = new Long(System.currentTimeMillis());
usedMemoryHistory.put(time, new Long(serverMemory.used()));
if (time.longValue() - lastPPMUpdate > 30000) {
// we don't want to do this too often
yacyCore.peerActions.updateMySeed();
ppmHistory.put(time, new Long(yacyCore.seedDB.mySeed().getPPM()));
lastPPMUpdate = time.longValue();
}
// clean up too old entries
while (usedMemoryHistory.size() > 0) {
time = (Long) usedMemoryHistory.firstKey();
if (System.currentTimeMillis() - time.longValue() < 600000) break;
usedMemoryHistory.remove(time);
}
while (ppmHistory.size() > 0) {
time = (Long) ppmHistory.firstKey();
if (System.currentTimeMillis() - time.longValue() < 600000) break;
ppmHistory.remove(time);
}
}
public boolean isRobinsonMode() {
// we are in robinson mode, if we do not exchange index by dht distribution
// we need to take care that search requests and remote indexing requests go only
@ -1762,6 +1733,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
public void close() {
log.logConfig("SWITCHBOARD SHUTDOWN STEP 1: sending termination signal to managed threads:");
serverProfiling.stopSystemProfiling();
moreMemory.cancel();
terminateAllThreads(true);
if (transferIdxThread != null) stopTransferWholeIndex(false);
@ -1818,7 +1790,6 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
}
public boolean deQueue() {
updateProfiling();
try {
// work off fresh entries from the proxy or from the crawler
if (onlineCaution()) {
@ -1910,9 +1881,6 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
// parse and index the resource
processResourceStack(nextentry);
}
// ready & finished
updateProfiling();
return true;
} catch (InterruptedException e) {
log.logInfo("DEQUEUE: Shutdown detected.");
@ -2450,6 +2418,9 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
"StorageTime: " + (storageEndTime-storageStartTime) + " ms");
}
// update profiling info
plasmaProfiling.updateIndexedPage(entry);
// check for interruption
checkInterruption();

@ -26,49 +26,119 @@
package de.anomic.server;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.TreeMap;
public class serverProfiling implements Cloneable {
public class serverProfiling extends Thread {
private ArrayList yield;
private long timer;
private static Map historyMaps; // key=name of history, value=TreeMap of Long/Event
private static Map eventCounter; // key=name of history, value=Integer of event counter
private static long lastCompleteCleanup;
private static serverProfiling systemProfiler;
public serverProfiling() {
yield = new ArrayList();
timer = 0;
static {
// initialize profiling
historyMaps = Collections.synchronizedMap(new HashMap());
eventCounter = Collections.synchronizedMap(new HashMap());
lastCompleteCleanup = System.currentTimeMillis();
systemProfiler = null;
}
public static class Entry {
public String process;
public int count;
public long time;
public static void startSystemProfiling() {
systemProfiler = new serverProfiling(1000);
systemProfiler.start();
}
public Entry(String process, int count, long time) {
this.process = process;
this.count = count;
this.time = time;
}
public static void stopSystemProfiling() {
systemProfiler.running = false;
}
private long delaytime;
private boolean running;
public serverProfiling(long time) {
this.delaytime = time;
running = true;
}
public void run() {
while (running) {
update("memory", new Long(serverMemory.used()));
try {
Thread.sleep(this.delaytime);
} catch (InterruptedException e) {
this.running = false;
}
}
}
public static void update(String eventName, Object eventPayload) {
// get event history container
int counter = eventCounter.containsKey(eventName) ? ((Integer) eventCounter.get(eventName)).intValue() : 0;
TreeMap history = historyMaps.containsKey(eventName) ? ((TreeMap) historyMaps.get(eventName)) : new TreeMap();
// update entry
Long time = new Long(System.currentTimeMillis());
history.put(time, new Event(counter, eventPayload));
counter++;
eventCounter.put(eventName, new Integer(counter));
// clean up too old entries
cleanup(history);
cleanup();
// store map
historyMaps.put(eventName, history);
}
public void startTimer() {
this.timer = System.currentTimeMillis();
private static void cleanup() {
if (System.currentTimeMillis() - lastCompleteCleanup < 600000) return;
Object[] historyNames = historyMaps.keySet().toArray();
for (int i = 0; i < historyNames.length; i++) {
cleanup((String) historyNames[i]);
}
lastCompleteCleanup = System.currentTimeMillis();
}
public void yield(String s, int count) {
long t = System.currentTimeMillis() - this.timer;
Entry e = new Entry(s, count, t);
yield.add(e);
private static void cleanup(String eventName) {
if (historyMaps.containsKey(eventName)) {
TreeMap history = (TreeMap) historyMaps.get(eventName);
cleanup(history);
if (history.size() > 0) {
historyMaps.put(eventName, history);
} else {
historyMaps.remove(eventName);
}
}
}
private static void cleanup(TreeMap history) {
// clean up too old entries
while (history.size() > 0) {
Long time = (Long) history.firstKey();
if (System.currentTimeMillis() - time.longValue() < 600000) break;
history.remove(time);
}
}
public Iterator events() {
// iterates over Entry-type objects
return yield.iterator();
public static Iterator history(String eventName) {
return (historyMaps.containsKey(eventName) ? ((TreeMap) historyMaps.get(eventName)) : new TreeMap()).values().iterator();
}
public int size() {
// returns number of events / Entry-Objects in yield array
return yield.size();
public static class Event {
public int count;
public Object payload;
public long time;
public Event(int count, Object payload) {
this.count = count;
this.payload = payload;
this.time = System.currentTimeMillis();
}
}
}
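The rewritten serverProfiling also doubles as the background memory sampler. A sketch of its lifecycle as wired up in the plasmaSwitchboard and PerformanceGraph hunks above (the width/height values are the servlet defaults, shown here only for illustration):

serverProfiling.startSystemProfiling();    // plasmaSwitchboard constructor: records serverMemory.used() as "memory" every second
ymageMatrix graph = plasmaProfiling.performanceGraph(660, 240);   // PerformanceGraph.java renders the collected history
serverProfiling.stopSystemProfiling();     // plasmaSwitchboard.close(): stops the sampling loop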
