- added LogStatistics_p.html servlet based on the logalizer (indexing values not functional yet due to charset/regex problems)

add the following to DATA/LOG/yacy.logging:
---
# Properties for the LogalizerHandler
de.anomic.server.logging.LogalizerHandler.enabled = true
de.anomic.server.logging.LogalizerHandler.debug = false
de.anomic.server.logging.LogalizerHandler.parserPackage = de.anomic.server.logging.logParsers
---
and "de.anomic.server.logging.LogalizerHandler" to the list of global handlers

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@3219 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
karlchenofhell 18 years ago
parent ce6493e265
commit 71112b1fe6

@ -0,0 +1,102 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<title>YaCy '#[clientname]#': Log Statistics</title>
#%env/templates/metas.template%#
</head>
<!-- NOTE(review): body id "ViewProfile" looks copied from another page; confirm it is the intended CSS hook for this servlet -->
<body id="ViewProfile">
#%env/templates/header.template%#
<h2>Log statistics<!--of the last #[parsedLines]# lines--></h2>
<form method="get" action="LogStatistics_p.html">
<!-- #(results)# switches between "no parser results available" (empty branch) and the statistics below -->
#(results)#::
<fieldset><legend>DHT</legend>
<fieldset>
<legend>URLs</legend>
<dl style="margin-top: .3em;">
<dt>URLs Received:</dt><dd>#[urlSum]#</dd>
<dt>URLs Requested:</dt><dd>#[urlReqSum]#</dd>
<dt>URLs Blocked:</dt><dd>#[blockedURLSum]#</dd>
<dt>Total time:</dt><dd>#[urlTimeSum]# #[urlTimeSumUnit]#</dd>
<dt>URLs Sent:</dt><dd>#[DHTSendURLs]#</dd>
</dl>
</fieldset>
<fieldset>
<legend>RWIs / Words</legend>
<dl style="margin-top: .3em;">
<dt>Words received:</dt><dd>#[wordsSum]#</dd>
<dt>RWIs received:</dt><dd>#[rwiSum]#</dd>
<dt>RWIs blocked:</dt><dd>#[blockedRWISum]#</dd>
<dt>Total time:</dt><dd>#[rwiTimeSum]# #[rwiTimeSumUnit]#</dd>
<dt>RWIs selected:</dt><dd>#[DHTSelectionWordsCount]#</dd>
<dt>Selection time:</dt><dd>#[DHTSelectionWordsTimeCount]#</dd>
</dl>
</fieldset>
<fieldset>
<legend>Chunks / Protocol</legend>
<dl style="margin-top: .3em;">
<dt>Sent traffic:</dt><dd>#[DHTSendTraffic]# #[DHTSendTrafficUnit]#</dd>
<dt>Total peers selected:</dt><dd>#[DHTSelectionTargetCount]#</dd>
<dt>DHT Distance: Min/Average/Max:</dt><dd>#[minDHTDist]# / #[avgDHTDist]# / #[maxDHTDist]#</dd>
<dt>Not enough peers found for distribution:</dt><dd>#[notEnoughDHTPeers]#</dd>
<dt>Peers busy:</dt><dd>#[busyPeerCount]#</dd>
<dt>Distribution failed:</dt><dd>#[failedIndexDistributionCount]#</dd>
<dt>Rejected index receives:</dt><dd>#[RWIRejectCount]#</dd>
<dt>Rejected index receives from:</dt>
<dd class="hides">
#[DHTRejectPeers]# peers#(useDHTRejectPeers)#::
<div class="hoverShow">
<ul class="tagList">#{DHTRejectPeers}#
<li>( <span class="tt">#[hash]#</span> ) #[name]#</li>#{/DHTRejectPeers}#
</ul>
</div>#(/useDHTRejectPeers)#
</dd>
<dt>Sent indexes:</dt>
<dd class="hides">
#[DHTPeers]# peers#(useDHTPeers)#::
<div class="hoverShow">
<ul class="tagList">#{DHTPeers}#
<li>( <span class="tt">#[hash]#</span> ) #[name]#</li>#{/DHTPeers}#
</ul>
</div>#(/useDHTPeers)#
</dd>
</dl>
</fieldset>
</fieldset>
<fieldset><legend>Ranking Distribution</legend>
<dl>
<dt>Submitted ranking files:</dt><dd>#[rankingDistributionCount]#</dd>
<dt>Total time submitting ranking files:</dt><dd>#[rankingDistributionTime]#</dd>
<dt>Failed ranking distributions:</dt><dd>#[rankingDistributionFailCount]#</dd>
</dl>
</fieldset>
<fieldset><legend>Indexing</legend>
<dl>
<dt>Indexed sites:</dt><dd>#[indexedSites]#</dd>
<dt>Size of indexed sites:</dt><dd>#[indexedSiteSizeSum]# #[indexedSiteSizeSumUnit]#</dd>
<dt>Indexed words:</dt><dd>#[indexedWords]#</dd>
<dt>Indexed anchors:</dt><dd>#[indexedAnchors]#</dd>
<dt>Total stacking time:</dt><dd>#[indexedStackingTime]# #[indexedStackingTimeUnit]#</dd>
<dt>Total parsing time:</dt><dd>#[indexedParsingTime]# #[indexedParsingTimeUnit]#</dd>
<dt>Total indexing time:</dt><dd>#[indexedIndexingTime]# #[indexedIndexingTimeUnit]#</dd>
<dt>Total storage time:</dt><dd>#[indexedStorageTime]# #[indexedStorageTimeUnit]#</dd>
</dl>
</fieldset>
<fieldset><legend>Errors</legend>
<dl>
<dt><span class="tt">Tried to create left child node twice</span></dt><dd>#[leftChildTwiceCount]#</dd>
<dt><span class="tt">Tried to create right child node twice</span></dt><dd>#[rightChildTwiceCount]#</dd>
<dt><span class="tt">Malformed URL Exception</span></dt><dd>#[malformedURLCount]#</dd>
</dl>
</fieldset>
#(/results)#
</form>
#%env/templates/footer.template%#
</body>
</html>

@ -0,0 +1,195 @@
// LogStatistic_p.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@anomic.de
// first published on http://www.anomic.de
// Frankfurt, Germany, 2005
// Created 16.01.2007
//
// This File is contributed by Franz Brauße
//
// $LastChangedDate: 2007-01-17 12:00:00 +0100 (Di, 17 Jan 2007) $
// $LastChangedRevision: 3216 $
// $LastChangedBy: karlchenofhell $
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Using this software in any meaning (reading, learning, copying, compiling,
// running) means that you agree that the Author(s) is (are) not responsible
// for cost, loss of data or any harm that may be caused directly or indirectly
// by usage of this software or this documentation. The usage of this software
// is on your own risk. The installation and usage (starting/running) of this
// software may allow other people or application to access your computer and
// any attached devices and is highly dependent on the configuration of the
// software which must be done by the user of the software; the author(s) is
// (are) also not responsible for proper configuration and usage of the
// software, even if provoked by documentation provided together with
// the software.
//
// Any changes to this file according to the GPL as documented in the file
// gpl.txt aside this file in the shipment you received can be done to the
// lines that follow this copyright notice here, but changes must not be
// done inside the copyright notice above. A re-distribution must contain
// the intact and unchanged copyright notice.
// Contributions and changes to the program code must be marked as such.
import java.util.HashSet;
import java.util.Hashtable;
import java.util.logging.Handler;
import java.util.logging.Logger;
import de.anomic.http.httpHeader;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.server.logging.LogalizerHandler;
import de.anomic.server.logging.logParsers.LogParser;
import de.anomic.server.logging.logParsers.LogParserPLASMA;
/**
 * Servlet backing LogStatistics_p.html: collects the results of the
 * LogalizerHandler's first log parser and exposes them as template
 * properties. All property keys are prefixed with "results_" so they
 * land below the #(results)# template branch.
 */
public class LogStatistics_p {

    /** prefix for all template properties below the #(results)# branch */
    private static final String RESULTS = "results_";

    // unit labels for transformTime
    private static final String MILLISECONDS = "ms";
    private static final String SECONDS = "sec";
    private static final String MINUTES = "min";
    private static final String HOURS = "h";
    private static final String DAYS = "days";
    // unit labels for transformMem, ordered by ascending magnitude
    private static final String[] units = new String[] { "Bytes", "KiloBytes", "MegaBytes", "GigaBytes" };

    /**
     * Builds the template properties for LogStatistics_p.html.
     * Sets "results" to 0 (and returns early) when no LogalizerHandler or
     * no parser result table is available; otherwise sets "results" to 1
     * and fills in all statistics values.
     */
    public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch env) {
        final serverObjects prop = new serverObjects();

        // locate the LogalizerHandler among the root logger's handlers and
        // fetch the result table of its first registered parser
        Logger logger = Logger.getLogger("");
        Handler[] handlers = logger.getHandlers();
        Hashtable r = null;
        for (int j = 0; j < handlers.length; j++) {
            if (handlers[j] instanceof LogalizerHandler) {
                LogalizerHandler h = (LogalizerHandler) handlers[j];
                // getParser(0) returns null when no parser is registered;
                // guard to avoid an NPE inside getParserResults
                LogParser parser = h.getParser(0);
                if (parser != null) r = h.getParserResults(parser);
                break;
            }
        }
        if (r == null) {
            prop.put("results", 0);
            return prop;
        }
        prop.put("results", 1);
        String[] t; // reusable { value, unit } pair from transformTime/transformMem
        prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_AVERAGE, r.get(LogParserPLASMA.DHT_DISTANCE_AVERAGE));
        prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_MAX, r.get(LogParserPLASMA.DHT_DISTANCE_MAX));
        prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_MIN, r.get(LogParserPLASMA.DHT_DISTANCE_MIN));
        prop.put(RESULTS + LogParserPLASMA.DHT_REJECTED, r.get(LogParserPLASMA.DHT_REJECTED));
        prop.put(RESULTS + LogParserPLASMA.DHT_SELECTED, r.get(LogParserPLASMA.DHT_SELECTED));
        prop.put(RESULTS + LogParserPLASMA.DHT_SENT_FAILED, r.get(LogParserPLASMA.DHT_SENT_FAILED));
        t = transformMem(((Long) r.get(LogParserPLASMA.DHT_TRAFFIC_SENT)).longValue());
        prop.put(RESULTS + LogParserPLASMA.DHT_TRAFFIC_SENT, t[0]);
        prop.put(RESULTS + LogParserPLASMA.DHT_TRAFFIC_SENT + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.DHT_URLS_SENT, r.get(LogParserPLASMA.DHT_URLS_SENT));
        prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED, r.get(LogParserPLASMA.DHT_WORDS_SELECTED));
        t = transformTime(((Integer) r.get(LogParserPLASMA.DHT_WORDS_SELECTED_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.ERROR_CHILD_TWICE_LEFT, r.get(LogParserPLASMA.ERROR_CHILD_TWICE_LEFT));
        prop.put(RESULTS + LogParserPLASMA.ERROR_CHILD_TWICE_RIGHT, r.get(LogParserPLASMA.ERROR_CHILD_TWICE_RIGHT));
        prop.put(RESULTS + LogParserPLASMA.ERROR_MALFORMED_URL, r.get(LogParserPLASMA.ERROR_MALFORMED_URL));
        prop.put(RESULTS + LogParserPLASMA.INDEXED_ANCHORS, r.get(LogParserPLASMA.INDEXED_ANCHORS));
        t = transformTime(((Integer) r.get(LogParserPLASMA.INDEXED_INDEX_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.INDEXED_INDEX_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_INDEX_TIME + "Unit", t[1]);
        t = transformTime(((Integer) r.get(LogParserPLASMA.INDEXED_PARSE_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.INDEXED_PARSE_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_PARSE_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES, r.get(LogParserPLASMA.INDEXED_SITES));
        t = transformMem(((Integer) r.get(LogParserPLASMA.INDEXED_SITES_SIZE)).longValue());
        prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES_SIZE, t[0]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES_SIZE + "Unit", t[1]);
        t = transformTime(((Integer) r.get(LogParserPLASMA.INDEXED_STACK_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.INDEXED_STACK_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_STACK_TIME + "Unit", t[1]);
        t = transformTime(((Integer) r.get(LogParserPLASMA.INDEXED_STORE_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.INDEXED_STORE_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_STORE_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.INDEXED_WORDS, r.get(LogParserPLASMA.INDEXED_WORDS));
        prop.put(RESULTS + LogParserPLASMA.PEERS_BUSY, r.get(LogParserPLASMA.PEERS_BUSY));
        prop.put(RESULTS + LogParserPLASMA.PEERS_TOO_LESS, r.get(LogParserPLASMA.PEERS_TOO_LESS));
        prop.put(RESULTS + LogParserPLASMA.RANKING_DIST, r.get(LogParserPLASMA.RANKING_DIST));
        prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_FAILED, r.get(LogParserPLASMA.RANKING_DIST_FAILED));
        t = transformTime(((Integer) r.get(LogParserPLASMA.RANKING_DIST_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.RWIS_BLOCKED, r.get(LogParserPLASMA.RWIS_BLOCKED));
        prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED, r.get(LogParserPLASMA.RWIS_RECEIVED));
        t = transformTime(((Long) r.get(LogParserPLASMA.RWIS_RECEIVED_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.URLS_BLOCKED, r.get(LogParserPLASMA.URLS_BLOCKED));
        prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED, r.get(LogParserPLASMA.URLS_RECEIVED));
        t = transformTime(((Long) r.get(LogParserPLASMA.URLS_RECEIVED_TIME)).longValue());
        prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED_TIME, t[0]);
        prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED_TIME + "Unit", t[1]);
        prop.put(RESULTS + LogParserPLASMA.URLS_REQUESTED, r.get(LogParserPLASMA.URLS_REQUESTED));
        prop.put(RESULTS + LogParserPLASMA.WORDS_RECEIVED, r.get(LogParserPLASMA.WORDS_RECEIVED));

        // NOTE(review): names and hashes come from two independent HashSets,
        // so names[i] is not guaranteed to belong to hashes[i] (HashSet has no
        // defined order) — confirm whether the parser should store pairs instead.
        Object[] names = ((HashSet) r.get(LogParserPLASMA.DHT_REJECTED_PEERS_NAME)).toArray();
        Object[] hashes = ((HashSet) r.get(LogParserPLASMA.DHT_REJECTED_PEERS_HASH)).toArray();
        int i = 0;
        for (; i < names.length && i < hashes.length; i++) {
            prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers_" + i + "_name", names[i]);
            prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers_" + i + "_hash", hashes[i]);
        }
        prop.put(RESULTS + "DHTRejectPeers", i);
        prop.put(RESULTS + "useDHTRejectPeers", (i > 0) ? 1 : 0);
        prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers", i);

        names = ((HashSet) r.get(LogParserPLASMA.DHT_SENT_PEERS_NAME)).toArray();
        hashes = ((HashSet) r.get(LogParserPLASMA.DHT_SENT_PEERS_HASH)).toArray();
        i = 0;
        for (; i < names.length && i < hashes.length; i++) {
            prop.put(RESULTS + "useDHTPeers_DHTPeers_" + i + "_name", names[i]);
            prop.put(RESULTS + "useDHTPeers_DHTPeers_" + i + "_hash", hashes[i]);
        }
        prop.put(RESULTS + "DHTPeers", i);
        prop.put(RESULTS + "useDHTPeers", (i > 0) ? 1 : 0);
        prop.put(RESULTS + "useDHTPeers_DHTPeers", i);
        return prop;
    }

    /**
     * Converts a duration in milliseconds to a human-readable value/unit pair,
     * using the largest unit in which the (truncated) value is at least 1.
     * @param timems duration in milliseconds
     * @return a two-element array { value, unit }
     */
    private static String[] transformTime(long timems) {
        // use >= so exact boundaries are promoted (1000 ms -> "1 sec", not "1000 ms")
        if (timems >= 1000) timems /= 1000; else return new String[] { Long.toString(timems), MILLISECONDS };
        if (timems >= 60) timems /= 60; else return new String[] { Long.toString(timems), SECONDS };
        if (timems >= 60) timems /= 60; else return new String[] { Long.toString(timems), MINUTES };
        if (timems >= 24) timems /= 24; else return new String[] { Long.toString(timems), HOURS };
        return new String[] { Long.toString(timems), DAYS };
    }

    /**
     * Converts a byte count to a human-readable value/unit pair. The value is
     * divided by 1024 while it is at least 10240, which keeps at least two
     * significant digits in the result.
     * @param mem size in bytes
     * @return a two-element array { value, unit }
     */
    private static String[] transformMem(long mem) {
        int i;
        // bound at units.length - 1: the original "i < units.length" let i reach
        // units.length for values >= ~10 TiB, making units[i] go out of bounds
        for (i = 0; i < units.length - 1 && mem >= 10240; i++)
            mem /= 1024;
        return new String[] { Long.toString(mem), units[i] };
    }
}

@ -488,4 +488,19 @@ div.CacheAdminInfo {
width: 100px;
border: 0px solid #aaaaaa;
float: left;
}
/* hover-to-reveal widget: a .hoverShow child inside a .hides container
   is kept invisible until the container is hovered */
.hides .hoverShow {
display: none;
}
/* while the parent is hovered, show the panel as an absolutely positioned,
   fixed-size, scrollable overlay box */
.hides:hover .hoverShow {
display: block;
position: absolute;
border: 3px double;
margin: 0 0 0 2em;
padding: .3em;
height: 20em;
overflow: auto;
width: 30em;
}

@ -81,7 +81,7 @@ a.bookmarkAction { color: #999999; }
.success { color: green; }
.Message { background-color: #eeeeee; }
.example { background-color: #D3D3D3; }
.hides:hover .hoverShow { background-color: #111; border-color: #333; }
/* Log */
body#ViewLog pre { color: #777; background-color: #333; }

@ -208,6 +208,7 @@ div.ProgressBarFill {
background-color:#D3D3D3;
}
.hides:hover .hoverShow { background-color: #F8F8FF; }
/* Log */

@ -209,6 +209,7 @@ div.ProgressBarFill {
background-color:#D3D3D3;
}
.hides:hover .hoverShow { background-color: #F3F3F3; }
/* Log */

@ -202,6 +202,7 @@ a.bookmarkAction {
background-color:#D3D3D3;
}
.hides:hover .hoverShow { background-color: #F8F8F8; }
/* Log */

@ -218,6 +218,7 @@ div.ProgressBarFill {
background-color:#D3D3D3;
}
.hides:hover .hoverShow { background-color: #010; }
/* Log */

@ -50,6 +50,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.Set;
import java.util.logging.Handler;
import java.util.logging.LogManager;
@ -147,6 +148,18 @@ public class LogalizerHandler extends Handler {
return parsers.keySet();
}
/**
 * Returns the parser at the given position in the iteration order of the
 * registered parsers, or null if the position is out of range.
 * Note: iteration order of the underlying map is not guaranteed to be stable.
 */
public LogParser getParser(int number) {
    int position = 0;
    for (Iterator keys = parsers.keySet().iterator(); keys.hasNext(); ) {
        Object key = keys.next();
        if (position == number) return (LogParser) parsers.get(key);
        position++;
    }
    return null;
}
/**
 * Returns the result table accumulated by the given parser.
 * Simply delegates to the parser's own result aggregation.
 */
public Hashtable getParserResults(LogParser parser) {
    final Hashtable results = parser.getResults();
    return results;
}

@ -51,121 +51,121 @@ import java.util.regex.Pattern;
public class LogParserPLASMA implements LogParser{
/** the version of the LogParser */
/** the version of the LogParser - <strong>Double</strong>*/
public static final String PARSER_VERSION = "version";
/** the amount of URLs received during DHT */
/** the amount of URLs received during DHT - <strong>Integer</strong> */
public static final String URLS_RECEIVED = "urlSum";
/** the amount of URLs requested during DHT */
/** the amount of URLs requested during DHT - <strong>Integer</strong> */
public static final String URLS_REQUESTED = "urlReqSum";
/** the amount of URLs blocked during DHT because they match the peer's blacklist */
/** the amount of URLs blocked during DHT because they match the peer's blacklist - <strong>Integer</strong> */
public static final String URLS_BLOCKED = "blockedURLSum";
/** the amount of words received during DHT */
/** the amount of words received during DHT - <strong>Integer</strong> */
public static final String WORDS_RECEIVED = "wordsSum";
/** the amount of RWIs received during DHT */
/** the amount of RWIs received during DHT - <strong>Integer</strong> */
public static final String RWIS_RECEIVED = "rwiSum";
/** the amount of RWIs blocked during DHT because their entries match the peer's blacklist */
/** the amount of RWIs blocked during DHT because their entries match the peer's blacklist - <strong>Integer</strong> */
public static final String RWIS_BLOCKED = "blockedRWISum";
/** total time receiving RWIs during DHT in milli seconds */
/** total time receiving RWIs during DHT in milli seconds - <strong>Long</strong> */
public static final String RWIS_RECEIVED_TIME = "rwiTimeSum";
/** total time receiving URLs during DHT in milli seconds */
/** total time receiving URLs during DHT in milli seconds - <strong>Long</strong> */
public static final String URLS_RECEIVED_TIME = "urlTimeSum";
/** the traffic sent during DHT in bytes */
/** the traffic sent during DHT in bytes - <strong>Long</strong> */
public static final String DHT_TRAFFIC_SENT = "DHTSendTraffic";
/** the amount of URLs requested by other peers and sent by this one */
/** the amount of URLs requested by other peers and sent by this one - <strong>Integer</strong> */
public static final String DHT_URLS_SENT = "DHTSendURLs";
/** the amount of rejected DHT transfers from other peers (i.e. because this peer was busy) */
/** the amount of rejected DHT transfers from other peers (i.e. because this peer was busy) - <strong>Integer</strong> */
public static final String DHT_REJECTED = "RWIRejectCount";
/** the amount of different peer-names from whose DHT transfers were rejected */
/** the peer-names from whose DHT transfers were rejected - <strong>HashSet</strong> */
public static final String DHT_REJECTED_PEERS_NAME = "DHTRejectPeerNames";
/** the amount of different peer-hashes from whose DHT transfers were rejected */
/** the peer-hashes from whose DHT transfers were rejected - <strong>HashSet</strong> */
public static final String DHT_REJECTED_PEERS_HASH = "DHTRejectPeerHashs";
/** the amount of different peer-names this peer sent DHT chunks to */
/** the peer-names this peer sent DHT chunks to - <strong>HashSet</strong> */
public static final String DHT_SENT_PEERS_NAME = "DHTPeerNames";
/** the amount of different peer-hashes this peer sent DHT chunks to */
/** the peer-hashes this peer sent DHT chunks to - <strong>HashSet</strong> */
public static final String DHT_SENT_PEERS_HASH = "DHTPeerHashs";
/** total amount of selected peers for index distribution */
/** total amount of selected peers for index distribution - <strong>Integer</strong> */
public static final String DHT_SELECTED = "DHTSelectionTargetCount";
/** total amount of words selected for index distribution */
/** total amount of words selected for index distribution - <strong>Integer</strong> */
public static final String DHT_WORDS_SELECTED = "DHTSelectionWordsCount";
/** total time selecting words for index distribution */
/** total time selecting words for index distribution - <strong>Integer</strong> */
public static final String DHT_WORDS_SELECTED_TIME = "DHTSelectionWordsTimeCount";
/** the minimal DHT distance during peer-selection for index distribution */
/** the minimal DHT distance during peer-selection for index distribution - <strong>Double</strong> */
public static final String DHT_DISTANCE_MIN = "minDHTDist";
/** the maximal DHT distance during peer-selection for index distribution */
/** the maximal DHT distance during peer-selection for index distribution - <strong>Double</strong> */
public static final String DHT_DISTANCE_MAX = "maxDHTDist";
/** the average DHT distance during peer-selection for index distribution */
/** the average DHT distance during peer-selection for index distribution - <strong>Double</strong> */
public static final String DHT_DISTANCE_AVERAGE = "avgDHTDist";
/** how many times remote peers were too busy to accept the index transfer */
/** how many times remote peers were too busy to accept the index transfer - <strong>Integer</strong> */
public static final String PEERS_BUSY = "busyPeerCount";
/** how many times not enough peers for index distribution were found */
/** how many times not enough peers for index distribution were found - <strong>Integer</strong> */
public static final String PEERS_TOO_LESS = "notEnoughDHTPeers";
/** how many times the index distribution failed (i.e. due to time-out or other reasons) */
/** how many times the index distribution failed (i.e. due to time-out or other reasons) - <strong>Integer</strong> */
public static final String DHT_SENT_FAILED = "failedIndexDistributionCount";
/** how many times the error "<code>tried to create left child-node twice</code>" occured */
/** how many times the error "<code>tried to create left child-node twice</code>" occurred - <strong>Integer</strong> */
public static final String ERROR_CHILD_TWICE_LEFT = "leftChildTwiceCount";
/** how many times the error "<code>tried to create right child-node twice</code>" occured */
/** how many times the error "<code>tried to create right child-node twice</code>" occurred - <strong>Integer</strong> */
public static final String ERROR_CHILD_TWICE_RIGHT = "rightChildTwiceCount";
/** how many ranking distributions were executed successfully */
/** how many ranking distributions were executed successfully - <strong>Integer</strong> */
public static final String RANKING_DIST = "rankingDistributionCount";
/** total time the ranking distributions took */
/** total time the ranking distributions took - <strong>Integer</strong> */
public static final String RANKING_DIST_TIME = "rankingDistributionTime";
/** how many ranking distributions failed */
/** how many ranking distributions failed - <strong>Integer</strong> */
public static final String RANKING_DIST_FAILED = "rankingDistributionFailCount";
/** how many times the error "<code>Malformed URL</code>" occured */
/** how many times the error "<code>Malformed URL</code>" occurred - <strong>Integer</strong> */
public static final String ERROR_MALFORMED_URL = "malformedURLCount";
/** the amount of indexed sites */
/** the amount of indexed sites - <strong>Integer</strong> */
public static final String INDEXED_SITES = "indexedSites";
/** total amount of indexed words */
/** total amount of indexed words - <strong>Integer</strong> */
public static final String INDEXED_WORDS = "indexedWords";
/** total size of all indexed sites */
/** total size of all indexed sites - <strong>Integer</strong> */
public static final String INDEXED_SITES_SIZE = "indexedSiteSizeSum";
/** total amount of indexed anchors */
/** total amount of indexed anchors - <strong>Integer</strong> */
public static final String INDEXED_ANCHORS = "indexedAnchors";
/** total time needed for stacking the site of an indexing */
/** total time needed for stacking the site of an indexing - <strong>Integer</strong> */
public static final String INDEXED_STACK_TIME = "indexedStackingTime";
/** total time needed for parsing during indexing */
/** total time needed for parsing during indexing - <strong>Integer</strong> */
public static final String INDEXED_PARSE_TIME = "indexedParsingTime";
/** total time needed for the actual indexing during indexing */
/** total time needed for the actual indexing during indexing - <strong>Integer</strong> */
public static final String INDEXED_INDEX_TIME = "indexedIndexingTime";
/** total time needed for storing the results of an indexing */
/** total time needed for storing the results of an indexing - <strong>Integer</strong> */
public static final String INDEXED_STORE_TIME = "indexedStorageTime";
@ -411,7 +411,7 @@ public class LogParserPLASMA implements LogParser{
results.put(DHT_WORDS_SELECTED_TIME , new Integer(DHTSelectionWordsTimeCount));
results.put(DHT_DISTANCE_MIN , new Double(minDHTDist));
results.put(DHT_DISTANCE_MAX , new Double(maxDHTDist));
results.put(DHT_DISTANCE_AVERAGE , new Double(avgDHTDist));
results.put(DHT_DISTANCE_AVERAGE , new Double(avgDHTDist / DHTSelectionTargetCount));
results.put(PEERS_BUSY , new Integer(busyPeerCount));
results.put(PEERS_TOO_LESS , new Integer(notEnoughDHTPeers));
results.put(DHT_SENT_FAILED , new Integer(failedIndexDistributionCount));

Loading…
Cancel
Save