- added LogStatistics_p.html servlet based on the logalizer (indexing values not functional yet due to charset/regex problems)
add the following to DATA/LOG/yacy.logging: --- # Properties for the LogalizerHandler de.anomic.server.logging.LogalizerHandler.enabled = true de.anomic.server.logging.LogalizerHandler.debug = false de.anomic.server.logging.LogalizerHandler.parserPackage = de.anomic.server.logging.logParsers --- and "de.anomic.server.logging.LogalizerHandler" to the list of global handlers git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@3219 6c8d7289-2bf4-0310-a012-ef5d649a1542pull/1/head
parent
ce6493e265
commit
71112b1fe6
@ -0,0 +1,102 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||
<html xmlns="http://www.w3.org/1999/xhtml">
|
||||
<head>
|
||||
<title>YaCy '#[clientname]#': Log Statistics</title>
|
||||
#%env/templates/metas.template%#
|
||||
</head>
|
||||
<body id="ViewProfile">
|
||||
#%env/templates/header.template%#
|
||||
<h2>Log statistics<!--of the last #[parsedLines]# lines--></h2>
|
||||
|
||||
<form method="get" action="LogStatistics_p.html">
|
||||
#(results)#::
|
||||
<fieldset><legend>DHT</legend>
|
||||
<fieldset>
|
||||
<legend>URLs</legend>
|
||||
<dl style="margin-top: .3em;">
|
||||
<dt>URLs Received:</dt><dd>#[urlSum]#</dd>
|
||||
<dt>URLs Requested:</dt><dd>#[urlReqSum]#</dd>
|
||||
<dt>URLs Blocked:</dt><dd>#[blockedURLSum]#</dd>
|
||||
<dt>Total time:</dt><dd>#[urlTimeSum]# #[urlTimeSumUnit]#</dd>
|
||||
<dt>URLs Sent:</dt><dd>#[DHTSendURLs]#</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>RWIs / Words</legend>
|
||||
<dl style="margin-top: .3em;">
|
||||
<dt>Words received:</dt><dd>#[wordsSum]#</dd>
|
||||
<dt>RWIs received:</dt><dd>#[rwiSum]#</dd>
|
||||
<dt>RWIs blocked:</dt><dd>#[blockedRWISum]#</dd>
|
||||
<dt>Total time:</dt><dd>#[rwiTimeSum]# #[rwiTimeSumUnit]#</dd>
|
||||
<dt>RWIs selected:</dt><dd>#[DHTSelectionWordsCount]#</dd>
|
||||
<dt>Selection time:</dt><dd>#[DHTSelectionWordsTimeCount]#</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
|
||||
<fieldset>
|
||||
<legend>Chunks / Protocol</legend>
|
||||
<dl style="margin-top: .3em;">
|
||||
<dt>Sent traffic:</dt><dd>#[DHTSendTraffic]# #[DHTSendTrafficUnit]#</dd>
|
||||
<dt>Total peers selected:</dt><dd>#[DHTSelectionTargetCount]#</dd>
|
||||
<dt>DHT Distance: Min/Average/Max:</dt><dd>#[minDHTDist]# / #[avgDHTDist]# / #[maxDHTDist]#</dd>
|
||||
<dt>Not enough peers found for distribution:</dt><dd>#[notEnoughDHTPeers]#</dd>
|
||||
<dt>Peers busy:</dt><dd>#[busyPeerCount]#</dd>
|
||||
<dt>Distribution failed:</dt><dd>#[failedIndexDistributionCount]#</dd>
|
||||
<dt>Rejected index receives:</dt><dd>#[RWIRejectCount]#</dd>
|
||||
<dt>Rejected index receives from:</dt>
|
||||
<dd class="hides">
|
||||
#[DHTRejectPeers]# peers#(useDHTRejectPeers)#::
|
||||
<div class="hoverShow">
|
||||
<ul class="tagList">#{DHTRejectPeers}#
|
||||
<li>( <span class="tt">#[hash]#</span> ) #[name]#</li>#{/DHTRejectPeers}#
|
||||
</ul>
|
||||
</div>#(/useDHTRejectPeers)#
|
||||
</dd>
|
||||
<dt>Sent indexes:</dt>
|
||||
<dd class="hides">
|
||||
#[DHTPeers]# peers#(useDHTPeers)#::
|
||||
<div class="hoverShow">
|
||||
<ul class="tagList">#{DHTPeers}#
|
||||
<li>( <span class="tt">#[hash]#</span> ) #[name]#</li>#{/DHTPeers}#
|
||||
</ul>
|
||||
</div>#(/useDHTPeers)#
|
||||
</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
</fieldset>
|
||||
|
||||
<fieldset><legend>Ranking Distribution</legend>
|
||||
<dl>
|
||||
<dt>Submitted ranking files:</dt><dd>#[rankingDistributionCount]#</dd>
|
||||
<dt>Total time submitting ranking files:</dt><dd>#[rankingDistributionTime]#</dd>
|
||||
<dt>Failed ranking distributions:</dt><dd>#[rankingDistributionFailCount]#</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
|
||||
<fieldset><legend>Indexing</legend>
|
||||
<dl>
|
||||
<dt>Indexed sites:</dt><dd>#[indexedSites]#</dd>
|
||||
<dt>Size of indexed sites:</dt><dd>#[indexedSiteSizeSum]# #[indexedSiteSizeSumUnit]#</dd>
|
||||
<dt>Indexed words:</dt><dd>#[indexedWords]#</dd>
|
||||
<dt>Indexed anchors:</dt><dd>#[indexedAnchors]#</dd>
|
||||
<dt>Total stacking time:</dt><dd>#[indexedStackingTime]# #[indexedStackingTimeUnit]#</dd>
|
||||
<dt>Total parsing time:</dt><dd>#[indexedParsingTime]# #[indexedParsingTimeUnit]#</dd>
|
||||
<dt>Total indexing time:</dt><dd>#[indexedIndexingTime]# #[indexedIndexingTimeUnit]#</dd>
|
||||
<dt>Total storage time:</dt><dd>#[indexedStorageTime]# #[indexedStorageTimeUnit]#</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
|
||||
<fieldset><legend>Errors</legend>
|
||||
<dl>
|
||||
<dt><span class="tt">Tried to create left child node twice</span></dt><dd>#[leftChildTwiceCount]#</dd>
|
||||
<dt><span class="tt">Tried to create right child node twice</span></dt><dd>#[rightChildTwiceCount]#</dd>
|
||||
<dt><span class="tt">Malformed URL Exception</span></dt><dd>#[malformedURLCount]#</dd>
|
||||
</dl>
|
||||
</fieldset>
|
||||
#(/results)#
|
||||
</form>
|
||||
|
||||
#%env/templates/footer.template%#
|
||||
</body>
|
||||
</html>
|
@ -0,0 +1,195 @@
|
||||
// LogStatistic_p.java
|
||||
// -----------------------
|
||||
// part of YaCy
|
||||
// (C) by Michael Peter Christen; mc@anomic.de
|
||||
// first published on http://www.anomic.de
|
||||
// Frankfurt, Germany, 2005
|
||||
// Created 16.01.2007
|
||||
//
|
||||
// This File is contributed by Franz Brauße
|
||||
//
|
||||
// $LastChangedDate: 2007-01-17 12:00:00 +0100 (Di, 17 Jan 2007) $
|
||||
// $LastChangedRevision: 3216 $
|
||||
// $LastChangedBy: karlchenofhell $
|
||||
//
|
||||
// This program is free software; you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation; either version 2 of the License, or
|
||||
// (at your option) any later version.
|
||||
//
|
||||
// This program is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
//
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with this program; if not, write to the Free Software
|
||||
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||
//
|
||||
// Using this software in any meaning (reading, learning, copying, compiling,
|
||||
// running) means that you agree that the Author(s) is (are) not responsible
|
||||
// for cost, loss of data or any harm that may be caused directly or indirectly
|
||||
// by usage of this software or this documentation. The usage of this software
|
||||
// is on your own risk. The installation and usage (starting/running) of this
|
||||
// software may allow other people or application to access your computer and
|
||||
// any attached devices and is highly dependent on the configuration of the
|
||||
// software which must be done by the user of the software; the author(s) is
|
||||
// (are) also not responsible for proper configuration and usage of the
|
||||
// software, even if provoked by documentation provided together with
|
||||
// the software.
|
||||
//
|
||||
// Any changes to this file according to the GPL as documented in the file
|
||||
// gpl.txt aside this file in the shipment you received can be done to the
|
||||
// lines that follows this copyright notice here, but changes must not be
|
||||
// done inside the copyright notice above. A re-distribution must contain
|
||||
// the intact and unchanged copyright notice.
|
||||
// Contributions and changes to the program code must be marked as such.
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.Hashtable;
|
||||
import java.util.logging.Handler;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import de.anomic.http.httpHeader;
|
||||
import de.anomic.server.serverObjects;
|
||||
import de.anomic.server.serverSwitch;
|
||||
import de.anomic.server.logging.LogalizerHandler;
|
||||
import de.anomic.server.logging.logParsers.LogParser;
|
||||
import de.anomic.server.logging.logParsers.LogParserPLASMA;
|
||||
|
||||
public class LogStatistics_p {
|
||||
|
||||
private static final String RESULTS = "results_";
|
||||
|
||||
public static serverObjects respond(httpHeader header, serverObjects post, serverSwitch env) {
|
||||
|
||||
final serverObjects prop = new serverObjects();
|
||||
|
||||
Logger logger = Logger.getLogger("");
|
||||
Handler[] handlers = logger.getHandlers();
|
||||
Hashtable r = null;
|
||||
for (int i=0; i<handlers.length; i++) {
|
||||
if (handlers[i] instanceof LogalizerHandler) {
|
||||
LogalizerHandler h = ((LogalizerHandler)handlers[i]);
|
||||
r = h.getParserResults(h.getParser(0));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (r == null) {
|
||||
prop.put("results", 0);
|
||||
return prop;
|
||||
} else {
|
||||
prop.put("results", 1);
|
||||
String[] t;
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_AVERAGE, r.get(LogParserPLASMA.DHT_DISTANCE_AVERAGE));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_MAX, r.get(LogParserPLASMA.DHT_DISTANCE_MAX));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_DISTANCE_MIN, r.get(LogParserPLASMA.DHT_DISTANCE_MIN));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_REJECTED, r.get(LogParserPLASMA.DHT_REJECTED));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_SELECTED, r.get(LogParserPLASMA.DHT_SELECTED));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_SENT_FAILED, r.get(LogParserPLASMA.DHT_SENT_FAILED));
|
||||
t = transformMem(((Long)r.get(LogParserPLASMA.DHT_TRAFFIC_SENT)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_TRAFFIC_SENT, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_TRAFFIC_SENT + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_URLS_SENT, r.get(LogParserPLASMA.DHT_URLS_SENT));
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED, r.get(LogParserPLASMA.DHT_WORDS_SELECTED));
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.DHT_WORDS_SELECTED_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.DHT_WORDS_SELECTED_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.ERROR_CHILD_TWICE_LEFT, r.get(LogParserPLASMA.ERROR_CHILD_TWICE_LEFT));
|
||||
prop.put(RESULTS + LogParserPLASMA.ERROR_CHILD_TWICE_RIGHT, r.get(LogParserPLASMA.ERROR_CHILD_TWICE_RIGHT));
|
||||
prop.put(RESULTS + LogParserPLASMA.ERROR_MALFORMED_URL, r.get(LogParserPLASMA.ERROR_MALFORMED_URL));
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_ANCHORS, r.get(LogParserPLASMA.INDEXED_ANCHORS));
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.INDEXED_INDEX_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_INDEX_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_INDEX_TIME + "Unit", t[1]);
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.INDEXED_PARSE_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_PARSE_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_PARSE_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES, r.get(LogParserPLASMA.INDEXED_SITES));
|
||||
t = transformMem(((Integer)r.get(LogParserPLASMA.INDEXED_SITES_SIZE)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES_SIZE, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_SITES_SIZE + "Unit", t[1]);
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.INDEXED_STACK_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_STACK_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_STACK_TIME + "Unit", t[1]);
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.INDEXED_STORE_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_STORE_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_STORE_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.INDEXED_WORDS, r.get(LogParserPLASMA.INDEXED_WORDS));
|
||||
prop.put(RESULTS + LogParserPLASMA.PEERS_BUSY, r.get(LogParserPLASMA.PEERS_BUSY));
|
||||
prop.put(RESULTS + LogParserPLASMA.PEERS_TOO_LESS, r.get(LogParserPLASMA.PEERS_TOO_LESS));
|
||||
prop.put(RESULTS + LogParserPLASMA.RANKING_DIST, r.get(LogParserPLASMA.RANKING_DIST));
|
||||
prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_FAILED, r.get(LogParserPLASMA.RANKING_DIST_FAILED));
|
||||
t = transformTime(((Integer)r.get(LogParserPLASMA.RANKING_DIST_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.RANKING_DIST_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.RWIS_BLOCKED, r.get(LogParserPLASMA.RWIS_BLOCKED));
|
||||
prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED, r.get(LogParserPLASMA.RWIS_RECEIVED));
|
||||
t = transformTime(((Long)r.get(LogParserPLASMA.RWIS_RECEIVED_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.RWIS_RECEIVED_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.URLS_BLOCKED, r.get(LogParserPLASMA.URLS_BLOCKED));
|
||||
prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED, r.get(LogParserPLASMA.URLS_RECEIVED));
|
||||
t = transformTime(((Long)r.get(LogParserPLASMA.URLS_RECEIVED_TIME)).longValue());
|
||||
prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED_TIME, t[0]);
|
||||
prop.put(RESULTS + LogParserPLASMA.URLS_RECEIVED_TIME + "Unit", t[1]);
|
||||
prop.put(RESULTS + LogParserPLASMA.URLS_REQUESTED, r.get(LogParserPLASMA.URLS_REQUESTED));
|
||||
prop.put(RESULTS + LogParserPLASMA.WORDS_RECEIVED, r.get(LogParserPLASMA.WORDS_RECEIVED));
|
||||
|
||||
Object[] names = ((HashSet)r.get(LogParserPLASMA.DHT_REJECTED_PEERS_NAME)).toArray();
|
||||
Object[] hashes = ((HashSet)r.get(LogParserPLASMA.DHT_REJECTED_PEERS_HASH)).toArray();
|
||||
int i = 0;
|
||||
for (; i<names.length && i<hashes.length; i++) {
|
||||
prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers_" + i + "_name", names[i]);
|
||||
prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers_" + i + "_hash", hashes[i]);
|
||||
}
|
||||
prop.put(RESULTS + "DHTRejectPeers", i);
|
||||
prop.put(RESULTS + "useDHTRejectPeers", (i > 0) ? 1 : 0);
|
||||
prop.put(RESULTS + "useDHTRejectPeers_DHTRejectPeers", i);
|
||||
|
||||
names = ((HashSet)r.get(LogParserPLASMA.DHT_SENT_PEERS_NAME)).toArray();
|
||||
hashes = ((HashSet)r.get(LogParserPLASMA.DHT_SENT_PEERS_HASH)).toArray();
|
||||
i = 0;
|
||||
for (; i<names.length && i<hashes.length; i++) {
|
||||
prop.put(RESULTS + "useDHTPeers_DHTPeers_" + i + "_name", names[i]);
|
||||
prop.put(RESULTS + "useDHTPeers_DHTPeers_" + i + "_hash", hashes[i]);
|
||||
}
|
||||
prop.put(RESULTS + "DHTPeers", i);
|
||||
prop.put(RESULTS + "useDHTPeers", (i > 0) ? 1 : 0);
|
||||
prop.put(RESULTS + "useDHTPeers_DHTPeers", i);
|
||||
|
||||
return prop;
|
||||
}
|
||||
}
|
||||
|
||||
private static final String MILLISECONDS = "ms";
|
||||
private static final String SECONDS = "sec";
|
||||
private static final String MINUTES = "min";
|
||||
private static final String HOURS = "h";
|
||||
private static final String DAYS = "days";
|
||||
|
||||
private static final String[] units = new String[] { "Bytes", "KiloBytes", "MegaBytes", "GigaBytes" };
|
||||
|
||||
private static String[] transformTime(long timems) {
|
||||
if (timems > 1000) timems /= 1000; else return new String[] { Long.toString(timems), MILLISECONDS };
|
||||
if (timems > 60) timems /= 60; else return new String[] { Long.toString(timems), SECONDS };
|
||||
if (timems > 60) timems /= 60; else return new String[] { Long.toString(timems), MINUTES };
|
||||
if (timems > 24) timems /= 24; else return new String[] { Long.toString(timems), HOURS };
|
||||
return new String[] { Long.toString(timems), DAYS };
|
||||
}
|
||||
|
||||
private static String[] transformMem(long mem) {
|
||||
int i;
|
||||
for (i=0; i<units.length && mem >= 10240; i++)
|
||||
mem /= 1024;
|
||||
return new String[] { Long.toString(mem), units[i] };
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
Loading…
Reference in new issue