Added logging-level guard conditions to all fine and finest log calls

This avoids the overhead of building log message strings in cases where they would not be printed anyway.

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5102 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 17 years ago
parent d3d41e2ee4
commit 05dbba4bab

@ -74,7 +74,7 @@ public class rct_p {
final String urlRejectReason = sb.acceptURL(url);
if (urlRejectReason == null) {
// stack url
sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
if (sb.getLog().isFinest()) sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
final String reasonString = sb.crawlStacker.stackCrawl(url, referrer, peerhash, "REMOTE-CRAWLING", loaddate, 0, sb.webIndex.defaultRemoteProfile);
if (reasonString == null) {

@ -70,8 +70,10 @@ public final class transfer {
if ((otherseed == null) || (filename.indexOf("..") >= 0)) {
// reject unknown peers: this does not appear fair, but anonymous senders are dangerous
// reject paths that contain '..' because they are dangerous
if (otherseed == null) sb.getLog().logFine("RankingTransmission: rejected unknown peer '" + otherpeer + "', current IP " + header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, "unknown"));
if (filename.indexOf("..") >= 0) sb.getLog().logFine("RankingTransmission: rejected wrong path '" + filename + "' from peer " + (otherseed == null ? "null" : otherseed.getName() + "/" + otherseed.getPublicAddress()) + ", current IP " + header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, "unknown"));
if (sb.getLog().isFine()) {
if (otherseed == null) sb.getLog().logFine("RankingTransmission: rejected unknown peer '" + otherpeer + "', current IP " + header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, "unknown"));
if (filename.indexOf("..") >= 0) sb.getLog().logFine("RankingTransmission: rejected wrong path '" + filename + "' from peer " + (otherseed == null ? "null" : otherseed.getName() + "/" + otherseed.getPublicAddress()) + ", current IP " + header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, "unknown"));
}
return prop;
}
@ -91,7 +93,7 @@ public final class transfer {
prop.put("process_path", ""); // currently empty; the store process will find a path
prop.put("process_maxsize", "-1"); // if response is too big we return the size of the file
sb.rankingPermissions.put(serverCodings.encodeMD5Hex(kelondroBase64Order.standardCoder.encodeString(access)), filename);
sb.getLog().logFine("RankingTransmission: granted peer " + otherpeerName + " to send CR file " + filename);
if (sb.getLog().isFine()) sb.getLog().logFine("RankingTransmission: granted peer " + otherpeerName + " to send CR file " + filename);
}
return prop;
}
@ -109,7 +111,7 @@ public final class transfer {
if ((grantedFile == null) || (!(grantedFile.equals(filename)))) {
// fraud-access of this interface
prop.put("response", "denied");
sb.getLog().logFine("RankingTransmission: denied " + otherpeerName + " to send CR file " + filename + ": wrong access code");
if (sb.getLog().isFine()) sb.getLog().logFine("RankingTransmission: denied " + otherpeerName + " to send CR file " + filename + ": wrong access code");
} else {
sb.rankingPermissions.remove(accesscode); // not needed any more
final File path = new File(sb.rankingPath, plasmaRankingDistribution.CR_OTHER);
@ -121,10 +123,10 @@ public final class transfer {
final String md5t = serverCodings.encodeMD5Hex(file);
if (md5t.equals(md5)) {
prop.put("response", "ok");
sb.getLog().logFine("RankingTransmission: received from peer " + otherpeerName + " CR file " + filename);
if (sb.getLog().isFine()) sb.getLog().logFine("RankingTransmission: received from peer " + otherpeerName + " CR file " + filename);
} else {
prop.put("response", "transfer failure");
sb.getLog().logFine("RankingTransmission: transfer failure from peer " + otherpeerName + " for CR file " + filename);
if (sb.getLog().isFine()) sb.getLog().logFine("RankingTransmission: transfer failure from peer " + otherpeerName + " for CR file " + filename);
}
}else{
//exploit?
@ -140,7 +142,7 @@ public final class transfer {
}
// wrong access
sb.getLog().logFine("RankingTransmission: rejected unknown process " + process + ":" + purpose + " from peer " + otherpeerName);
if (sb.getLog().isFine()) sb.getLog().logFine("RankingTransmission: rejected unknown process " + process + ":" + purpose + " from peer " + otherpeerName);
return prop;
}

@ -115,7 +115,7 @@ public final class transferRWI {
} */ else {
// we want and can receive indexes
// log value status (currently added to find outOfMemory error
sb.getLog().logFine("Processing " + indexes.length + " bytes / " + wordc + " words / " + entryc + " entries from " + otherPeerName);
if (sb.getLog().isFine()) sb.getLog().logFine("Processing " + indexes.length + " bytes / " + wordc + " words / " + entryc + " entries from " + otherPeerName);
final long startProcess = System.currentTimeMillis();
// decode request
@ -157,7 +157,7 @@ public final class transferRWI {
// block blacklisted entries
if ((blockBlacklist) && (plasmaSwitchboard.urlBlacklist.hashInBlacklistedCache(indexReferenceBlacklist.BLACKLIST_DHT, urlHash))) {
yacyCore.log.logFine("transferRWI: blocked blacklisted URLHash '" + urlHash + "' from peer " + otherPeerName);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferRWI: blocked blacklisted URLHash '" + urlHash + "' from peer " + otherPeerName);
blocked++;
continue;
}

@ -91,7 +91,7 @@ public final class transferURL {
// read new lurl-entry
urls = post.get("url" + i);
if (urls == null) {
yacyCore.log.logFine("transferURL: got null URL-string from peer " + otherPeerName);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferURL: got null URL-string from peer " + otherPeerName);
blocked++;
continue;
}
@ -114,14 +114,14 @@ public final class transferURL {
// check whether entry is too old
if (lEntry.freshdate().getTime() <= freshdate) {
yacyCore.log.logFine("transerURL: received too old URL from peer " + otherPeerName + ": " + lEntry.freshdate());
if (yacyCore.log.isFine()) yacyCore.log.logFine("transerURL: received too old URL from peer " + otherPeerName + ": " + lEntry.freshdate());
blocked++;
continue;
}
// check if the entry is blacklisted
if ((blockBlacklist) && (plasmaSwitchboard.urlBlacklist.isListed(indexReferenceBlacklist.BLACKLIST_DHT, comp.url()))) {
yacyCore.log.logFine("transferURL: blocked blacklisted URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferURL: blocked blacklisted URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName);
lEntry = null;
blocked++;
continue;
@ -130,7 +130,7 @@ public final class transferURL {
// check if the entry is in our network domain
final String urlRejectReason = sb.acceptURL(comp.url());
if (urlRejectReason != null) {
yacyCore.log.logFine("transferURL: blocked URL '" + comp.url() + "' (" + urlRejectReason + ") from peer " + otherPeerName);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferURL: blocked URL '" + comp.url() + "' (" + urlRejectReason + ") from peer " + otherPeerName);
lEntry = null;
blocked++;
continue;
@ -140,7 +140,7 @@ public final class transferURL {
try {
sb.webIndex.putURL(lEntry);
sb.crawlResults.stack(lEntry, iam, iam, 3);
yacyCore.log.logFine("transferURL: received URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName);
if (yacyCore.log.isFine()) yacyCore.log.logFine("transferURL: received URL '" + comp.url().toNormalform(false, true) + "' from peer " + otherPeerName);
received++;
} catch (final IOException e) {
e.printStackTrace();

@ -349,7 +349,7 @@ public class CrawlQueues {
final String urlRejectReason = sb.acceptURL(url);
if (urlRejectReason == null) {
// stack url
sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
if (sb.getLog().isFinest()) sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
final String reasonString = sb.crawlStacker.stackCrawl(url, referrer, hash, item.getDescription(), loaddate, 0, sb.webIndex.defaultRemoteProfile);
if (reasonString == null) {

@ -221,7 +221,7 @@ public final class CrawlStacker extends Thread {
if (profile == null) return;
// DEBUG
log.logFinest("ENQUEUE "+ nexturl +", referer="+referrerhash +", initiator="+initiatorHash +", name="+name +", load="+loadDate +", depth="+currentdepth);
if (log.isFinest()) log.logFinest("ENQUEUE "+ nexturl +", referer="+referrerhash +", initiator="+initiatorHash +", name="+name +", load="+loadDate +", depth="+currentdepth);
// check first before we create a big object
if (this.urlEntryCache.has(nexturl.hash().getBytes())) return;

@ -186,7 +186,7 @@ public class NoticeURLImporter extends AbstractImporter implements Importer {
}
if (this.urlCount % 100 == 0) {
this.log.logFine(this.urlCount + " URLs and '" + this.profileCount + "' profile entries processed so far.");
if (this.log.isFine()) this.log.logFine(this.urlCount + " URLs and '" + this.profileCount + "' profile entries processed so far.");
}
if (this.isAborted()) break;
}

@ -57,6 +57,7 @@ import de.anomic.yacy.yacyURL;
public class RobotsTxt {
public static final String ROBOTS_DB_PATH_SEPARATOR = ";";
private static final serverLog log = new serverLog("ROBOTS");
kelondroMap robotsTable;
private final File robotsTableFile;
@ -151,13 +152,13 @@ public class RobotsTxt {
try {
robotsURL = new yacyURL("http://" + urlHostPort + "/robots.txt", null);
} catch (final MalformedURLException e) {
serverLog.logSevere("ROBOTS","Unable to generate robots.txt URL for host:port '" + urlHostPort + "'.");
log.logSevere("Unable to generate robots.txt URL for host:port '" + urlHostPort + "'.");
robotsURL = null;
}
Object[] result = null;
if (robotsURL != null) {
serverLog.logFine("ROBOTS","Trying to download the robots.txt file from URL '" + robotsURL + "'.");
if (log.isFine()) log.logFine("Trying to download the robots.txt file from URL '" + robotsURL + "'.");
try {
result = downloadRobotsTxt(robotsURL, 5, robotsTxt4Host);
} catch (final Exception e) {
@ -533,7 +534,7 @@ public class RobotsTxt {
if (res.getStatusLine().startsWith("2")) {
if (!res.getResponseHeader().mime().startsWith("text/plain")) {
robotsTxt = null;
serverLog.logFinest("ROBOTS","Robots.txt from URL '" + robotsURL + "' has wrong mimetype '" + res.getResponseHeader().mime() + "'.");
if (log.isFinest()) log.logFinest("Robots.txt from URL '" + robotsURL + "' has wrong mimetype '" + res.getResponseHeader().mime() + "'.");
} else {
// getting some metadata
@ -542,7 +543,7 @@ public class RobotsTxt {
// if the robots.txt file was not changed we break here
if ((eTag != null) && (oldEtag != null) && (eTag.equals(oldEtag))) {
serverLog.logFinest("ROBOTS","Robots.txt from URL '" + robotsURL + "' was not modified. Abort downloading of new version.");
if (log.isFinest()) log.logFinest("Robots.txt from URL '" + robotsURL + "' was not modified. Abort downloading of new version.");
return null;
}
@ -556,7 +557,7 @@ public class RobotsTxt {
robotsTxt = sbb.getBytes();
downloadEnd = System.currentTimeMillis();
serverLog.logFinest("ROBOTS","Robots.txt successfully loaded from URL '" + robotsURL + "' in " + (downloadEnd-downloadStart) + " ms.");
if (log.isFinest()) log.logFinest("Robots.txt successfully loaded from URL '" + robotsURL + "' in " + (downloadEnd-downloadStart) + " ms.");
}
} else if (res.getStatusCode() == 304) {
return null;
@ -564,7 +565,7 @@ public class RobotsTxt {
// getting redirection URL
String redirectionUrlString = res.getResponseHeader().get(httpRequestHeader.LOCATION);
if (redirectionUrlString==null) {
serverLog.logFinest("ROBOTS","robots.txt could not be downloaded from URL '" + robotsURL + "' because of missing redirecton header. [" + res.getStatusLine() + "].");
if (log.isFinest()) log.logFinest("robots.txt could not be downloaded from URL '" + robotsURL + "' because of missing redirecton header. [" + res.getStatusLine() + "].");
robotsTxt = null;
} else {
@ -574,15 +575,15 @@ public class RobotsTxt {
final yacyURL redirectionUrl = yacyURL.newURL(robotsURL, redirectionUrlString);
// following the redirection
serverLog.logFinest("ROBOTS","Redirection detected for robots.txt with URL '" + robotsURL + "'." +
if (log.isFinest()) log.logFinest("Redirection detected for robots.txt with URL '" + robotsURL + "'." +
"\nRedirecting request to: " + redirectionUrl);
return downloadRobotsTxt(redirectionUrl,redirectionCount,entry);
}
} else if (res.getStatusCode() == 401 || res.getStatusCode() == 403) {
accessCompletelyRestricted = true;
serverLog.logFinest("ROBOTS","Access to Robots.txt not allowed on URL '" + robotsURL + "'.");
if (log.isFinest()) log.logFinest("Access to Robots.txt not allowed on URL '" + robotsURL + "'.");
} else {
serverLog.logFinest("ROBOTS","robots.txt could not be downloaded from URL '" + robotsURL + "'. [" + res.getStatusLine() + "].");
if (log.isFinest()) log.logFinest("robots.txt could not be downloaded from URL '" + robotsURL + "'. [" + res.getStatusLine() + "].");
robotsTxt = null;
}
} catch (final Exception e) {

@ -172,7 +172,7 @@ public class SitemapParser extends DefaultHandler {
InputStream contentStream = res.getDataAsStream();
if ((contentMimeType != null) &&
(contentMimeType.equals("application/x-gzip") || contentMimeType.equals("application/gzip"))) {
this.logger.logFine("Sitemap file has mimetype " + contentMimeType);
if (this.logger.isFine()) this.logger.logFine("Sitemap file has mimetype " + contentMimeType);
contentStream = new GZIPInputStream(contentStream);
}

@ -356,7 +356,7 @@ public class blogBoard {
try {
final String date = record.get("date");
if (date == null) {
serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (serverLog.isFinest("Blog")) serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
return new Date();
}
return serverDate.parseShortSecond(date);
@ -372,7 +372,7 @@ public class blogBoard {
public String getTimestamp() {
final String timestamp = record.get("date");
if (timestamp == null) {
serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (serverLog.isFinest("Blog")) serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
return serverDate.formatShortSecond();
}
return timestamp;

@ -262,7 +262,7 @@ public class blogBoardComments {
try {
final String date = record.get("date");
if (date == null) {
serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (serverLog.isFinest("Blog")) serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
return new Date();
}
synchronized (SimpleFormatter) {
@ -276,7 +276,7 @@ public class blogBoardComments {
public String getTimestamp() {
final String timestamp = record.get("date");
if (timestamp == null) {
serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (serverLog.isFinest("Blog")) serverLog.logFinest("Blog", "ERROR: date field missing in blogBoard");
return dateString(new Date());
}
return timestamp;

@ -71,7 +71,7 @@ public class translator {
} else {
//Filename not available, but it will be printed in Log
//after all untranslated Strings as "Translated file: "
serverLog.logFine("TRANSLATOR", "Unused String: "+key);
if (serverLog.isFine("TRANSLATOR")) serverLog.logFine("TRANSLATOR", "Unused String: "+key);
}
}
return result;

@ -163,7 +163,7 @@ public class HttpConnectionInfo {
allConnections.remove(con);
}
}
serverLog.logFine("HTTPC", "cleanUp ConnectionInfo removed "+ (sizeBefore - allConnections.size()));
if (serverLog.isFine("HTTPC")) serverLog.logFine("HTTPC", "cleanUp ConnectionInfo removed "+ (sizeBefore - allConnections.size()));
}
} catch (final java.util.ConcurrentModificationException e) {
serverLog.logWarning("HTTPC", "cleanUp ConnectionInfo interrupted by ConcurrentModificationException");

@ -861,7 +861,7 @@ public final class httpd implements serverHandler, Cloneable {
b.write(Integer.parseInt(s.substring(i + 2, end)));
i += end - i;
} else { // 'named' smybols
log.logFine("discovered yet unimplemented HTML entity '" + s.substring(i, end + 1) + "'");
if (log.isFine()) log.logFine("discovered yet unimplemented HTML entity '" + s.substring(i, end + 1) + "'");
b.write(s.charAt(i));
}
} else {

@ -73,7 +73,6 @@ import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.zip.GZIPOutputStream;
import de.anomic.htmlFilter.htmlFilterContentScraper;
@ -677,11 +676,9 @@ public final class httpdFileHandler {
// storing the content into the cache
ref = new SoftReference<byte[]>(templateContent);
templateCache.put(targetFile, ref);
if (theLogger.isLoggable(Level.FINEST))
theLogger.logFinest("Cache MISS for file " + targetFile);
if (theLogger.isFinest()) theLogger.logFinest("Cache MISS for file " + targetFile);
} else {
if (theLogger.isLoggable(Level.FINEST))
theLogger.logFinest("Cache HIT for file " + targetFile);
if (theLogger.isFinest()) theLogger.logFinest("Cache HIT for file " + targetFile);
}
// creating an inputstream needed by the template

@ -327,8 +327,8 @@ public final class httpdProxyHandler {
yacyURL url = null;
try {
url = httpHeader.getRequestURL(conProp);
theLogger.logFine(reqID +" GET "+ url);
theLogger.logFinest(reqID +" header: "+ requestHeader);
if (theLogger.isFine()) theLogger.logFine(reqID +" GET "+ url);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" header: "+ requestHeader);
//redirector
if (redirectorEnabled){
@ -342,7 +342,7 @@ public final class httpdProxyHandler {
url = new yacyURL(newUrl, null);
} catch(final MalformedURLException e){}//just keep the old one
}
theLogger.logFinest(reqID +" using redirector to "+ url);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" using redirector to "+ url);
conProp.setProperty(httpHeader.CONNECTION_PROP_HOST, url.getHost()+":"+url.getPort());
conProp.setProperty(httpHeader.CONNECTION_PROP_PATH, url.getPath());
requestHeader.put(httpHeader.HOST, url.getHost()+":"+url.getPort());
@ -431,17 +431,17 @@ public final class httpdProxyHandler {
if (yacyCore.getOnlineMode() == 0) {
if (cacheExists) {
theLogger.logFinest(reqID +" fulfill request from cache");
if (theLogger.isFinest()) theLogger.logFinest(reqID +" fulfill request from cache");
fulfillRequestFromCache(conProp,url,ext,requestHeader,cachedResponseHeader,cacheFile,countedRespond);
} else {
theLogger.logInfo("URL not availabe in Cache"+" and not in online-mode!");
httpd.sendRespondError(conProp,countedRespond,4,404,null,"URL not availabe in Cache",null);
}
} else if (cacheExists && cacheEntry.shallUseCacheForProxy()) {
theLogger.logFinest(reqID +" fulfill request from cache");
if (theLogger.isFinest()) theLogger.logFinest(reqID +" fulfill request from cache");
fulfillRequestFromCache(conProp,url,ext,requestHeader,cachedResponseHeader,cacheFile,countedRespond);
} else {
theLogger.logFinest(reqID +" fulfill request from web");
if (theLogger.isFinest()) theLogger.logFinest(reqID +" fulfill request from web");
fulfillRequestFromWeb(conProp,url,ext,requestHeader,cachedResponseHeader,cacheFile,countedRespond);
}
@ -515,7 +515,7 @@ public final class httpdProxyHandler {
// send request
try {
res = client.GET(getUrl);
theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
if (theLogger.isFinest()) theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
conProp.put(httpHeader.CONNECTION_PROP_CLIENT_REQUEST_HEADER, requestHeader);
final httpResponseHeader responseHeader = res.getResponseHeader();
@ -557,8 +557,8 @@ public final class httpdProxyHandler {
// handle file types and make (possibly transforming) output stream
final OutputStream outStream = (gzippedOut != null) ? gzippedOut : ((chunkedOut != null)? chunkedOut : respond);
final boolean isBinary = isBinary(responseHeader);
if(isBinary) {
theLogger.logFine(reqID +" create direct passthrough for URL " + url + ", extension '" + ext + "', mime-type '" + responseHeader.mime() + "'");
if (isBinary) {
if (theLogger.isFine()) theLogger.logFine(reqID +" create direct passthrough for URL " + url + ", extension '" + ext + "', mime-type '" + responseHeader.mime() + "'");
} else {
// handle text stuff (encoding and so on)
if (
@ -566,13 +566,13 @@ public final class httpdProxyHandler {
(plasmaParser.supportedHTMLContent(url,responseHeader.mime()))
) {
// make a transformer
theLogger.logFine(reqID +" create transformer for URL " + url);
if (theLogger.isFine()) theLogger.logFine(reqID +" create transformer for URL " + url);
//hfos = new htmlFilterOutputStream((gzippedOut != null) ? gzippedOut : ((chunkedOut != null)? chunkedOut : respond), null, transformer, (ext.length() == 0));
final Charset charSet = responseHeader.getCharSet();
textOutput = new htmlFilterWriter(outStream,charSet, null, transformer, (ext.length() == 0));
} else {
// simply pass through without parsing
theLogger.logFine(reqID +" create text passthrough for URL " + url + ", extension '" + ext + "', mime-type '" + responseHeader.mime() + "'");
if (theLogger.isFine()) theLogger.logFine(reqID +" create text passthrough for URL " + url + ", extension '" + ext + "', mime-type '" + responseHeader.mime() + "'");
textOutput = new OutputStreamWriter(outStream, responseHeader.getCharSet());
}
}
@ -587,7 +587,7 @@ public final class httpdProxyHandler {
responseHeader.put(httpResponseHeader.TRANSFER_ENCODING, "chunked");
}
theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
httpd.sendRespondHeader(
conProp,
respond,
@ -630,7 +630,7 @@ public final class httpdProxyHandler {
} else {
cacheArray = null;
}
theLogger.logFine(reqID +" writeContent of " + url + " produced cacheArray = " + ((cacheArray == null) ? "null" : ("size=" + cacheArray.length)));
if (theLogger.isFine()) theLogger.logFine(reqID +" writeContent of " + url + " produced cacheArray = " + ((cacheArray == null) ? "null" : ("size=" + cacheArray.length)));
if (textOutput instanceof htmlFilterWriter) ((htmlFilterWriter) textOutput).close();
@ -665,7 +665,7 @@ public final class httpdProxyHandler {
writeTextContent(res, new BufferedWriter(textOutput), fileStream);
}
if (textOutput instanceof htmlFilterWriter) ((htmlFilterWriter) textOutput).close();
theLogger.logFine(reqID +" for write-file of " + url + ": contentLength = " + contentLength + ", sizeBeforeDelete = " + sizeBeforeDelete);
if (theLogger.isFine()) theLogger.logFine(reqID +" for write-file of " + url + ": contentLength = " + contentLength + ", sizeBeforeDelete = " + sizeBeforeDelete);
plasmaHTCache.writeFileAnnouncement(cacheFile);
if (sizeBeforeDelete == -1) {
// totally fresh file
@ -688,7 +688,7 @@ public final class httpdProxyHandler {
}
} else {
// no caching
theLogger.logFine(reqID +" "+ cacheFile.toString() + " not cached." +
if (theLogger.isFine()) theLogger.logFine(reqID +" "+ cacheFile.toString() + " not cached." +
" StoreError=" + ((storeError==null)?"None":storeError) +
" StoreHTCache=" + storeHTCache +
" SupportetContent=" + isSupportedContent);
@ -932,8 +932,8 @@ public final class httpdProxyHandler {
httpd.sendRespondError(conProp,respond,4,501,null,errorMsg,e);
return;
}
theLogger.logFine(reqID +" HEAD "+ url);
theLogger.logFinest(reqID +" header: "+ requestHeader);
if (theLogger.isFine()) theLogger.logFine(reqID +" HEAD "+ url);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" header: "+ requestHeader);
// check the blacklist, inspired by [AS]: respond a 404 for all AGIS (all you get is shit) servers
final String hostlow = host.toLowerCase();
@ -961,14 +961,14 @@ public final class httpdProxyHandler {
// generate request-url
final String connectHost = hostPart(host, port, yAddress);
final String getUrl = "http://"+ connectHost + remotePath;
theLogger.logFinest(reqID +" using url: "+ getUrl);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" using url: "+ getUrl);
final JakartaCommonsHttpClient client = setupHttpClient(requestHeader, connectHost);
// send request
try {
res = client.HEAD(getUrl);
theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
if (theLogger.isFinest()) theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
// determine if it's an internal error of the httpc
final httpResponseHeader responseHeader = res.getResponseHeader();
@ -979,7 +979,7 @@ public final class httpdProxyHandler {
prepareResponseHeader(responseHeader, res.getHttpVer());
// sending the server respond back to the client
theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
httpd.sendRespondHeader(conProp,respond,httpVer,res.getStatusCode(),res.getStatusLine().substring(4),responseHeader);
respond.flush();
} finally {
@ -1032,8 +1032,8 @@ public final class httpdProxyHandler {
httpd.sendRespondError(conProp,countedRespond,4,501,null,errorMsg,e);
return;
}
theLogger.logFine(reqID +" POST "+ url);
theLogger.logFinest(reqID +" header: "+ requestHeader);
if (theLogger.isFine()) theLogger.logFine(reqID +" POST "+ url);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" header: "+ requestHeader);
prepareRequestHeader(conProp, requestHeader, host.toLowerCase());
@ -1049,7 +1049,7 @@ public final class httpdProxyHandler {
final String connectHost = hostPart(host, port, yAddress);
final String getUrl = "http://"+ connectHost + remotePath;
theLogger.logFinest(reqID +" using url: "+ getUrl);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" using url: "+ getUrl);
final JakartaCommonsHttpClient client = setupHttpClient(requestHeader, connectHost);
@ -1079,7 +1079,7 @@ public final class httpdProxyHandler {
try {
// sending the request
res = client.POST(getUrl, body);
theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
if (theLogger.isFinest()) theLogger.logFinest(reqID +" response status: "+ res.getStatusLine());
final httpResponseHeader responseHeader = res.getResponseHeader();
// determine if it's an internal error of the httpc
@ -1097,7 +1097,7 @@ public final class httpdProxyHandler {
}
// sending response headers
theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
if (theLogger.isFinest()) theLogger.logFinest(reqID +" sending response header: "+ responseHeader);
httpd.sendRespondHeader(conProp,
countedRespond,
httpVer,
@ -1539,7 +1539,7 @@ public final class httpdProxyHandler {
final String exceptionMsg = e.getMessage();
if ((exceptionMsg != null) && (exceptionMsg.indexOf("Corrupt GZIP trailer") >= 0)) {
// just do nothing, we leave it this way
theLogger.logFine("ignoring bad gzip trail for URL " + url + " (" + e.getMessage() + ")");
if (theLogger.isFine()) theLogger.logFine("ignoring bad gzip trail for URL " + url + " (" + e.getMessage() + ")");
forceConnectionClose(conProp);
} else if ((exceptionMsg != null) && (exceptionMsg.indexOf("Connection reset")>= 0)) {
errorMessage = "Connection reset";
@ -1801,7 +1801,7 @@ public final class httpdProxyHandler {
logMessage.append(mime);
// sending the logging message to the logger
proxyLog.logFine(logMessage.toString());
if (proxyLog.isFine()) proxyLog.logFine(logMessage.toString());
}
}

@ -319,20 +319,20 @@ public final class indexRepositoryReference {
}
final indexURLReference entry = eiter.next();
if (entry == null) {
serverLog.logFine("URLDBCLEANER", "entry == null");
if (serverLog.isFine("URLDBCLEANER")) serverLog.logFine("URLDBCLEANER", "entry == null");
} else if (entry.hash() == null) {
serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + "hash == null");
if (serverLog.isFine("URLDBCLEANER")) serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + "hash == null");
} else {
final indexURLReference.Components comp = entry.comp();
totalSearchedUrls++;
if (comp.url() == null) {
serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + entry.hash() + "URL == null");
if (serverLog.isFine("URLDBCLEANER")) serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + entry.hash() + "URL == null");
remove(entry.hash());
} else if (blacklist.isListed(indexReferenceBlacklist.BLACKLIST_CRAWLER, comp.url()) ||
blacklist.isListed(indexReferenceBlacklist.BLACKLIST_DHT, comp.url())) {
lastBlacklistedUrl = comp.url().toNormalform(true, true);
lastBlacklistedHash = entry.hash();
serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + entry.hash() + " " + comp.url().toNormalform(false, true));
if (serverLog.isFine("URLDBCLEANER")) serverLog.logFine("URLDBCLEANER", ++blacklistedUrls + " blacklisted (" + ((double) blacklistedUrls / totalSearchedUrls) * 100 + "%): " + entry.hash() + " " + comp.url().toNormalform(false, true));
remove(entry.hash());
if (blacklistedUrls % 100 == 0) {
serverLog.logInfo("URLDBCLEANER", "Deleted " + blacklistedUrls + " URLs until now. Last deleted URL-Hash: " + lastBlacklistedUrl);

@ -344,7 +344,7 @@ public final class kelondroBLOBHeap implements kelondroBLOB {
// add the entry to the index
this.index.putl(key, entry.seek);
System.out.println("*** DEBUG BLOB: replaced-fit record at " + entry.seek + ", reclen=" + reclen + ", key=" + new String(key));
//System.out.println("*** DEBUG BLOB: replaced-fit record at " + entry.seek + ", reclen=" + reclen + ", key=" + new String(key));
// finished!
return;
@ -391,7 +391,7 @@ public final class kelondroBLOBHeap implements kelondroBLOB {
// add a new free entry
free.add(new gap(lseek + 4 + reclen, newfreereclen));
System.out.println("*** DEBUG BLOB: replaced-split record at " + lseek + ", reclen=" + reclen + ", new reclen=" + newfreereclen + ", key=" + new String(key));
//System.out.println("*** DEBUG BLOB: replaced-split record at " + lseek + ", reclen=" + reclen + ", new reclen=" + newfreereclen + ", key=" + new String(key));
// finished!
return;

@ -76,8 +76,7 @@ public class kelondroCachedRA extends kelondroAbstractRA implements kelondroRA {
final Integer element = it.next();
writeCache(cacheMemory.get(element), element.intValue());
cacheMemory.remove(element);
final int age = cacheScore.deleteScore(element);
de.anomic.server.logging.serverLog.logFine("CACHE: " + name, "GC; age=" + ((((int) (0xFFFFFFFFL & System.currentTimeMillis())) - age) / 1000));
cacheScore.deleteScore(element);
}
// add new element
cache = new byte[cacheElementSize];

@ -116,7 +116,7 @@ public class SZParserExtractCallback extends ArchiveExtractCallback {
u = entry.getKey().toNormalform(true, true);
if (u.startsWith(base + "/")) {
final String ref = "#" + u.substring(base.length() + 1);
this.log.logFinest("changing " + entry.getKey() + " to use reference " + ref);
if (this.log.isFinest()) this.log.logFinest("changing " + entry.getKey() + " to use reference " + ref);
nanchors.put(new yacyURL(base + ref, null), entry.getValue());
} else {
nanchors.put(entry.getKey(), entry.getValue());

@ -212,7 +212,7 @@ public class vcfParser extends AbstractParser implements Parser {
}
} else {
this.theLogger.logFinest("Invalid data in vcf file" +
if (theLogger.isFinest()) this.theLogger.logFinest("Invalid data in vcf file" +
"\n\tURL: " + location +
"\n\tLine: " + line +
"\n\tLine-Nr: " + lineNr);

@ -980,7 +980,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
* Testing if the content type is supported by the available parsers
* ========================================================================= */
final boolean isSupportedContent = plasmaParser.supportedContent(entry.url(),entry.getMimeType());
log.logFinest("STORE "+ entry.url() +" content of type "+ entry.getMimeType() +" is supported: "+ isSupportedContent);
if (log.isFinest()) log.logFinest("STORE "+ entry.url() +" content of type "+ entry.getMimeType() +" is supported: "+ isSupportedContent);
/* =========================================================================
* INDEX CONTROL HEADER
@ -991,8 +991,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
boolean doIndexing = true;
if (entry.requestProhibitsIndexing()) {
doIndexing = false;
if (this.log.isFine())
this.log.logFine("Crawling of " + entry.url() + " prohibited by request.");
if (this.log.isFine()) this.log.logFine("Crawling of " + entry.url() + " prohibited by request.");
}
/* =========================================================================
@ -1518,7 +1517,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
in.queueEntry.updateStatus(IndexingStack.QUEUE_STATE_PARSING);
// debug
log.logFinest("PARSE "+ in.queueEntry.toString());
if (log.isFinest()) log.logFinest("PARSE "+ in.queueEntry.toString());
plasmaParserDocument document = null;
try {
@ -1601,7 +1600,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
in.queueEntry.updateStatus(IndexingStack.QUEUE_STATE_CONDENSING);
// debug
log.logFinest("CONDENSE "+ in.queueEntry.toString());
if (log.isFinest()) log.logFinest("CONDENSE "+ in.queueEntry.toString());
plasmaCondenser condenser = null;
try {

@ -271,7 +271,7 @@ public final class serverCore extends serverAbstractBusyThread implements server
if (bindIP.startsWith("#")) {
final String interfaceName = bindIP.substring(1);
String hostName = null;
this.log.logFine("Trying to determine IP address of interface '" + interfaceName + "'.");
if (this.log.isFine()) this.log.logFine("Trying to determine IP address of interface '" + interfaceName + "'.");
final Enumeration<NetworkInterface> interfaces = NetworkInterface.getNetworkInterfaces();
if (interfaces != null) {
@ -322,7 +322,7 @@ public final class serverCore extends serverAbstractBusyThread implements server
// prepare for new connection
// idleThreadCheck();
this.switchboard.handleBusyState(this.busySessions.size());
this.log.logFinest("* waiting for connections, " + this.busySessions.size() + " sessions running");
if (log.isFinest()) this.log.logFinest("* waiting for connections, " + this.busySessions.size() + " sessions running");
announceThreadBlockApply();
@ -549,7 +549,7 @@ public final class serverCore extends serverAbstractBusyThread implements server
*/
public void log(final boolean outgoing, final String request) {
serverCore.this.log.logFine(this.userAddress.getHostAddress() + "/" + this.identity + " " +
if (serverCore.this.log.isFine()) serverCore.this.log.logFine(this.userAddress.getHostAddress() + "/" + this.identity + " " +
"[" + ((busySessions == null)? -1 : busySessions.size()) + ", " + this.commandCounter +
((outgoing) ? "] > " : "] < ") +
request);
@ -992,22 +992,22 @@ public final class serverCore extends serverAbstractBusyThread implements server
this.log.logInfo("Initializing SSL support ...");
// creating a new keystore instance of type (java key store)
this.log.logFine("Initializing keystore ...");
if (this.log.isFine()) this.log.logFine("Initializing keystore ...");
final KeyStore ks = KeyStore.getInstance("JKS");
// loading keystore data from file
this.log.logFine("Loading keystore file " + keyStoreFileName);
if (this.log.isFine()) this.log.logFine("Loading keystore file " + keyStoreFileName);
final FileInputStream stream = new FileInputStream(keyStoreFileName);
ks.load(stream, keyStorePwd.toCharArray());
stream.close();
// creating a keystore factory
this.log.logFine("Initializing key manager factory ...");
if (this.log.isFine()) this.log.logFine("Initializing key manager factory ...");
final KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
kmf.init(ks,keyStorePwd.toCharArray());
// initializing the ssl context
this.log.logFine("Initializing SSL context ...");
if (this.log.isFine()) this.log.logFine("Initializing SSL context ...");
final SSLContext sslcontext = SSLContext.getInstance("TLS");
sslcontext.init(kmf.getKeyManagers(), null, null);

@ -139,7 +139,7 @@ public final class yacyClient {
}
if (result == null || result.size() < 3) {
yacyCore.log.logFine("yacyClient.publishMySeed result error: " +
if (yacyCore.log.isFine()) yacyCore.log.logFine("yacyClient.publishMySeed result error: " +
((result == null) ? "result null" : ("result=" + result.toString())));
return -1;
}
@ -155,7 +155,7 @@ public final class yacyClient {
} else {
otherPeer = yacySeed.genRemoteSeed(seed, salt, false);
if (otherPeer == null || !otherPeer.hash.equals(otherHash)) {
yacyCore.log.logFine("yacyClient.publishMySeed: consistency error: other peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' wrong");
if (yacyCore.log.isFine()) yacyCore.log.logFine("yacyClient.publishMySeed: consistency error: other peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' wrong");
return -1; // no success
}
}
@ -194,10 +194,10 @@ public final class yacyClient {
yacyCore.log.logInfo("yacyClient.publishMySeed: Peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' reported us as junior.");
} else if ((mytype.equalsIgnoreCase(yacySeed.PEERTYPE_SENIOR)) ||
(mytype.equalsIgnoreCase(yacySeed.PEERTYPE_PRINCIPAL))) {
yacyCore.log.logFine("yacyClient.publishMySeed: Peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' reported us as " + mytype + ", accepted other peer.");
if (yacyCore.log.isFine()) yacyCore.log.logFine("yacyClient.publishMySeed: Peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' reported us as " + mytype + ", accepted other peer.");
} else {
// wrong type report
yacyCore.log.logFine("yacyClient.publishMySeed: Peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' reported us as " + mytype + ", rejecting other peer.");
if (yacyCore.log.isFine()) yacyCore.log.logFine("yacyClient.publishMySeed: Peer '" + ((otherPeer==null)?"unknown":otherPeer.getName()) + "' reported us as " + mytype + ", rejecting other peer.");
return -1;
}
if (mySeed.orVirgin().equals(yacySeed.PEERTYPE_VIRGIN))
@ -482,7 +482,7 @@ public final class yacyClient {
}
if ((result == null) || (result.size() == 0)) {
yacyCore.log.logFine("SEARCH failed FROM "
if (yacyCore.log.isFine()) yacyCore.log.logFine("SEARCH failed FROM "
+ target.hash
+ ":"
+ target.getName()
@ -641,7 +641,7 @@ public final class yacyClient {
} catch (final NumberFormatException e) {
searchtime = totalrequesttime;
}
yacyCore.log.logFine("SEARCH "
if (yacyCore.log.isFine()) yacyCore.log.logFine("SEARCH "
+ results
+ " URLS FROM "
+ target.hash
@ -855,7 +855,7 @@ public final class yacyClient {
while (eenum.hasNext()) {
entry = eenum.next();
if (urlCache.get(entry.urlHash()) == null) {
yacyCore.log.logFine("DEBUG transferIndex: to-send url hash '" + entry.urlHash() + "' is not contained in urlCache");
if (yacyCore.log.isFine()) yacyCore.log.logFine("DEBUG transferIndex: to-send url hash '" + entry.urlHash() + "' is not contained in urlCache");
}
}
}
@ -898,7 +898,7 @@ public final class yacyClient {
for (int i = 0; i < uhs.length; i++) {
urls[i] = urlCache.get(uhs[i]);
if (urls[i] == null) {
yacyCore.log.logFine("DEBUG transferIndex: requested url hash '" + uhs[i] + "', unknownURL='" + uhss + "'");
if (yacyCore.log.isFine()) yacyCore.log.logFine("DEBUG transferIndex: requested url hash '" + uhs[i] + "', unknownURL='" + uhss + "'");
}
}

@ -152,7 +152,7 @@ public class yacyCore {
}
public final void publishSeedList() {
log.logFine("yacyCore.publishSeedList: Triggered Seed Publish");
if (log.isFine()) log.logFine("yacyCore.publishSeedList: Triggered Seed Publish");
/*
if (oldIPStamp.equals((String) seedDB.mySeed.get(yacySeed.IP, "127.0.0.1")))
@ -169,7 +169,7 @@ public class yacyCore {
(System.currentTimeMillis() - sb.webIndex.seedDB.lastSeedUpload_timeStamp < 1000 * 60 * 60 * 24) &&
(sb.webIndex.seedDB.mySeed().isPrincipal())
) {
log.logFine("yacyCore.publishSeedList: not necessary to publish: oldIP is equal, sizeConnected is equal and I can reach myself under the old IP.");
if (log.isFine()) log.logFine("yacyCore.publishSeedList: not necessary to publish: oldIP is equal, sizeConnected is equal and I can reach myself under the old IP.");
return;
}
@ -195,7 +195,7 @@ public class yacyCore {
if (seedUploadMethod.equals("")) {
this.sb.setConfig("seedUploadMethod", "none");
}
log.logFine("yacyCore.publishSeedList: No uploading method configured");
if (log.isFine()) log.logFine("yacyCore.publishSeedList: No uploading method configured");
return;
}
}
@ -274,7 +274,7 @@ public class yacyCore {
final yacySeed newSeed = sb.webIndex.seedDB.getConnected(this.seed.hash);
if (newSeed != null) {
if (!newSeed.isOnline()) {
log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " is not online." +
" Removing Peer from connected");
sb.webIndex.peerActions.peerDeparture(newSeed, "peer not online");
@ -282,13 +282,13 @@ public class yacyCore {
if (newSeed.getLastSeenUTC() < (System.currentTimeMillis() - 10000)) {
// update last seed date
if (newSeed.getLastSeenUTC() >= this.seed.getLastSeenUTC()) {
log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " with old LastSeen: '" +
serverDate.formatShortSecond(new Date(newSeed.getLastSeenUTC())) + "'");
newSeed.setLastSeenUTC();
sb.webIndex.peerActions.peerArrival(newSeed, true);
} else {
log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) +
" peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " with old LastSeen: '" +
serverDate.formatShortSecond(new Date(newSeed.getLastSeenUTC())) + "', this is more recent: '" +
serverDate.formatShortSecond(new Date(this.seed.getLastSeenUTC())) + "'");
@ -297,7 +297,7 @@ public class yacyCore {
}
}
} else {
log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) + " peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " not in connectedDB");
if (log.isFine()) log.logFine("publish: recently handshaked " + this.seed.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_SENIOR) + " peer '" + this.seed.getName() + "' at " + this.seed.getPublicAddress() + " not in connectedDB");
}
}
} catch (final Exception e) {
@ -407,7 +407,7 @@ public class yacyCore {
i++;
final String address = seed.getClusterAddress();
log.logFine("HELLO #" + i + " to peer '" + seed.get(yacySeed.NAME, "") + "' at " + address); // debug
if (log.isFine()) log.logFine("HELLO #" + i + " to peer '" + seed.get(yacySeed.NAME, "") + "' at " + address); // debug
final String seederror = seed.isProper(false);
if ((address == null) || (seederror != null)) {
// we don't like that address, delete it
@ -465,7 +465,7 @@ public class yacyCore {
}
}
}
log.logFine("DBSize before -> after Cleanup: " + dbSize + " -> " + amIAccessibleDB.size());
if (log.isFine()) log.logFine("DBSize before -> after Cleanup: " + dbSize + " -> " + amIAccessibleDB.size());
}
log.logInfo("PeerPing: I am accessible for " + accessible +
" peer(s), not accessible for " + notaccessible + " peer(s).");
@ -538,12 +538,12 @@ public class yacyCore {
threadCount = yacyCore.publishThreadGroup.enumerate(threadList);
// we need to use a timeout here because of missing interruptable session threads ...
log.logFine("publish: Waiting for " + yacyCore.publishThreadGroup.activeCount() + " remaining publishing threads to finish shutdown ...");
if (log.isFine()) log.logFine("publish: Waiting for " + yacyCore.publishThreadGroup.activeCount() + " remaining publishing threads to finish shutdown ...");
for (int currentThreadIdx = 0; currentThreadIdx < threadCount; currentThreadIdx++) {
final Thread currentThread = threadList[currentThreadIdx];
if (currentThread.isAlive()) {
log.logFine("publish: Waiting for remaining publishing thread '" + currentThread.getName() + "' to finish shutdown");
if (log.isFine()) log.logFine("publish: Waiting for remaining publishing thread '" + currentThread.getName() + "' to finish shutdown");
try { currentThread.join(500); } catch (final InterruptedException ex) {}
}
}
@ -710,7 +710,7 @@ public class yacyCore {
try {
sb.webIndex.seedDB.mySeed().put(yacySeed.PEERTYPE, yacySeed.PEERTYPE_PRINCIPAL); // this information shall also be uploaded
log.logFine("SaveSeedList: Using seed uploading method '" + seedUploadMethod + "' for seed-list uploading." +
if (log.isFine()) log.logFine("SaveSeedList: Using seed uploading method '" + seedUploadMethod + "' for seed-list uploading." +
"\n\tPrevious peerType is '" + sb.webIndex.seedDB.mySeed().get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_JUNIOR) + "'.");
// logt = seedDB.uploadCache(seedFTPServer, seedFTPAccount, seedFTPPassword, seedFTPPath, seedURL);

@ -358,9 +358,9 @@ public class yacyDHTAction {
firstdist = yacyDHTAction.dhtDistance(seed.hash, firstKey);
lastdist = yacyDHTAction.dhtDistance(seed.hash, lastKey);
if (lastdist > maxDist) {
if (log != null) log.logFine("Discarded too distant DHT target peer " + seed.getName() + ":" + seed.hash + ", distance2first = " + firstdist + ", distance2last = " + lastdist);
if (log != null && yacyCore.log.isFine()) log.logFine("Discarded too distant DHT target peer " + seed.getName() + ":" + seed.hash + ", distance2first = " + firstdist + ", distance2last = " + lastdist);
} else if (doublecheck.contains(seed.hash)) {
if (log != null) log.logFine("Discarded double DHT target peer " + seed.getName() + ":" + seed.hash + ", distance2first = " + firstdist + ", distance2last = " + lastdist);
if (log != null && yacyCore.log.isFine()) log.logFine("Discarded double DHT target peer " + seed.getName() + ":" + seed.hash + ", distance2first = " + firstdist + ", distance2last = " + lastdist);
} else {
if (log != null) log.logInfo("Selected " + ((seeds.size() < primaryPeerCount) ? "primary" : "reserve") + " DHT target peer " + seed.getName() + ":" + seed.hash + ", distance2first = " + firstdist + ", distance2last = " + lastdist);
seeds.add(seed);

@ -76,26 +76,26 @@ public class yacyPeerActions {
if ((peerType.equals(yacySeed.PEERTYPE_VIRGIN)) || (peerType.equals(yacySeed.PEERTYPE_JUNIOR))) {
// reject unqualified seeds
yacyCore.log.logFine("connect: rejecting NOT QUALIFIED " + peerType + " seed " + seed.getName());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting NOT QUALIFIED " + peerType + " seed " + seed.getName());
return false;
}
if (!(peerType.equals(yacySeed.PEERTYPE_SENIOR) || peerType.equals(yacySeed.PEERTYPE_PRINCIPAL))) {
// reject unqualified seeds
yacyCore.log.logFine("connect: rejecting NOT QUALIFIED " + peerType + " seed " + seed.getName());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting NOT QUALIFIED " + peerType + " seed " + seed.getName());
return false;
}
final yacySeed doubleSeed = this.seedDB.lookupByIP(seed.getInetAddress(), true, false, false);
if ((doubleSeed != null) && (doubleSeed.getPort() == seed.getPort()) && (!(doubleSeed.hash.equals(seed.hash)))) {
// a user frauds with his peer different peer hashes
yacyCore.log.logFine("connect: rejecting FRAUD (double hashes " + doubleSeed.hash + "/" + seed.hash + " on same port " + seed.getPort() + ") peer " + seed.getName());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting FRAUD (double hashes " + doubleSeed.hash + "/" + seed.hash + " on same port " + seed.getPort() + ") peer " + seed.getName());
return false;
}
if (seed.get(yacySeed.LASTSEEN, "").length() != 14) {
// hack for peers that do not have a LastSeen date
seed.setLastSeenUTC();
yacyCore.log.logFine("connect: reset wrong date (" + seed.getName() + "/" + seed.hash + ")");
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: reset wrong date (" + seed.getName() + "/" + seed.hash + ")");
}
// connection time
@ -110,7 +110,7 @@ public class yacyPeerActions {
}
if (Math.abs(nowUTC0Time - ctimeUTC0) > 60 * 60 * 24 * 1000) {
// the new connection is out-of-age, we reject the connection
yacyCore.log.logFine("connect: rejecting out-dated peer '" + seed.getName() + "' from " + seed.getPublicAddress() + "; nowUTC0=" + nowUTC0Time + ", seedUTC0=" + ctimeUTC0 + ", TimeDiff=" + serverDate.formatInterval(Math.abs(nowUTC0Time - ctimeUTC0)));
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting out-dated peer '" + seed.getName() + "' from " + seed.getPublicAddress() + "; nowUTC0=" + nowUTC0Time + ", seedUTC0=" + ctimeUTC0 + ", TimeDiff=" + serverDate.formatInterval(Math.abs(nowUTC0Time - ctimeUTC0)));
return false;
}
@ -147,13 +147,13 @@ public class yacyPeerActions {
if (!direct) {
if (ctimeUTC0 < dtimeUTC0) {
// the disconnection was later, we reject the connection
yacyCore.log.logFine("connect: rejecting disconnected peer '" + seed.getName() + "' from " + seed.getPublicAddress());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting disconnected peer '" + seed.getName() + "' from " + seed.getPublicAddress());
return false;
}
}
// this is a return of a lost peer
yacyCore.log.logFine("connect: returned KNOWN " + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: returned KNOWN " + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
this.seedDB.addConnected(seed);
return true;
}
@ -164,7 +164,7 @@ public class yacyPeerActions {
// if the old LastSeen date is later then the other
// info, then we reject the info
if ((ctimeUTC0 < (connectedSeed.getLastSeenUTC())) && (!direct)) {
yacyCore.log.logFine("connect: rejecting old info about peer '" + seed.getName() + "'");
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting old info about peer '" + seed.getName() + "'");
return false;
}
@ -172,10 +172,10 @@ public class yacyPeerActions {
// TODO: update seed name lookup cache
}*/
} catch (final NumberFormatException e) {
yacyCore.log.logFine("connect: rejecting wrong peer '" + seed.getName() + "' from " + seed.getPublicAddress() + ". Cause: " + e.getMessage());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: rejecting wrong peer '" + seed.getName() + "' from " + seed.getPublicAddress() + ". Cause: " + e.getMessage());
return false;
}
yacyCore.log.logFine("connect: updated KNOWN " + ((direct) ? "direct " : "") + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: updated KNOWN " + ((direct) ? "direct " : "") + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
seedDB.addConnected(seed);
return true;
}
@ -184,10 +184,10 @@ public class yacyPeerActions {
if ((seedDB.mySeedIsDefined()) && (seed.getIP().equals(this.seedDB.mySeed().getIP()))) {
// seed from the same IP as the calling client: can be
// the case if there runs another one over a NAT
yacyCore.log.logFine("connect: saved NEW seed (myself IP) " + seed.getPublicAddress());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: saved NEW seed (myself IP) " + seed.getPublicAddress());
} else {
// completely new seed
yacyCore.log.logFine("connect: saved NEW " + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: saved NEW " + peerType + " peer '" + seed.getName() + "' from " + seed.getPublicAddress());
}
this.seedDB.addConnected(seed);
return true;
@ -208,7 +208,7 @@ public class yacyPeerActions {
public void peerDeparture(final yacySeed peer, final String cause) {
if (peer == null) return;
// we do this if we did not get contact with the other peer
yacyCore.log.logFine("connect: no contact to a " + peer.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN) + " peer '" + peer.getName() + "' at " + peer.getPublicAddress() + ". Cause: " + cause);
if (yacyCore.log.isFine()) yacyCore.log.logFine("connect: no contact to a " + peer.get(yacySeed.PEERTYPE, yacySeed.PEERTYPE_VIRGIN) + " peer '" + peer.getName() + "' at " + peer.getPublicAddress() + ". Cause: " + cause);
synchronized (seedDB) {
if (!seedDB.hasDisconnected(peer.hash)) { disconnects++; }
peer.put("dct", Long.toString(System.currentTimeMillis()));

@ -195,7 +195,7 @@ public class yacySearch extends Thread {
distance = yacyDHTAction.dhtDistance(seed.hash, wordhash);
if (distance > 0.2) continue; // catch bug in peer selection
if (!seed.getFlagAcceptRemoteIndex()) continue; // probably a robinson peer
serverLog.logFine("PLASMA", "selectPeers/DHTorder: " + seed.hash + ":" + seed.getName() + "/" + distance + " for wordhash " + wordhash + ", score " + c);
if (serverLog.isFine("PLASMA")) serverLog.logFine("PLASMA", "selectPeers/DHTorder: " + seed.hash + ":" + seed.getName() + "/" + distance + " for wordhash " + wordhash + ", score " + c);
ranking.addScore(seed.hash, c--);
regularSeeds.put(seed.hash, seed);
}
@ -210,7 +210,7 @@ public class yacySearch extends Thread {
if (seed == null) continue;
if (!seed.getFlagAcceptRemoteIndex()) continue; // probably a robinson peer
score = (int) Math.round(Math.random() * ((c / 3) + 3));
serverLog.logFine("PLASMA", "selectPeers/RWIcount: " + seed.hash + ":" + seed.getName() + ", RWIcount=" + seed.get(yacySeed.ICOUNT,"") + ", score " + score);
if (serverLog.isFine("PLASMA")) serverLog.logFine("PLASMA", "selectPeers/RWIcount: " + seed.hash + ":" + seed.getName() + ", RWIcount=" + seed.get(yacySeed.ICOUNT,"") + ", score " + score);
ranking.addScore(seed.hash, score);
regularSeeds.put(seed.hash, seed);
c--;

@ -816,7 +816,7 @@ public class yacySeed implements Cloneable {
// check semantics of content
final String testResult = resultSeed.isProper(ownSeed);
if (testResult != null) {
yacyCore.log.logFinest("seed is not proper (" + testResult + "): " + resultSeed);
if (yacyCore.log.isFinest()) yacyCore.log.logFinest("seed is not proper (" + testResult + "): " + resultSeed);
return null;
}

@ -255,7 +255,7 @@ public final class yacySeedDB implements httpdAlternativeDomainNames {
protected synchronized kelondroMapDataMining resetSeedTable(kelondroMapDataMining seedDB, final File seedDBFile) {
// this is an emergency function that should only be used if any problem with the
// seed.db is detected
yacyCore.log.logFine("seed-db " + seedDBFile.toString() + " reset (on-the-fly)");
yacyCore.log.logWarning("seed-db " + seedDBFile.toString() + " reset (on-the-fly)");
seedDB.close();
if(!seedDBFile.delete())
serverLog.logWarning("yacySeedDB", "could not delete file "+ seedDBFile);
@ -397,7 +397,7 @@ public final class yacySeedDB implements httpdAlternativeDomainNames {
return result;
} catch (final kelondroException e) {
seedActiveDB = resetSeedTable(seedActiveDB, seedActiveDBFile);
yacyCore.log.logFine("Internal Error at yacySeedDB.seedsByAge: " + e.getMessage(), e);
if (yacyCore.log.isFine()) yacyCore.log.logFine("Internal Error at yacySeedDB.seedsByAge: " + e.getMessage(), e);
return null;
}
}
@ -811,15 +811,15 @@ public final class yacySeedDB implements httpdAlternativeDomainNames {
// create a seed file which for uploading ...
seedFile = File.createTempFile("seedFile",".txt", plasmaHTCache.cachePath);
seedFile.deleteOnExit();
serverLog.logFine("YACY","SaveSeedList: Storing seedlist into tempfile " + seedFile.toString());
if (serverLog.isFine("YACY")) serverLog.logFine("YACY", "SaveSeedList: Storing seedlist into tempfile " + seedFile.toString());
final ArrayList<String> uv = storeCache(seedFile, true);
// uploading the seed file
serverLog.logFine("YACY","SaveSeedList: Trying to upload seed-file, " + seedFile.length() + " bytes, " + uv.size() + " entries.");
if (serverLog.isFine("YACY")) serverLog.logFine("YACY", "SaveSeedList: Trying to upload seed-file, " + seedFile.length() + " bytes, " + uv.size() + " entries.");
log = uploader.uploadSeedFile(sb,seedDB,seedFile);
// test download
serverLog.logFine("YACY","SaveSeedList: Trying to download seed-file '" + seedURL + "'.");
if (serverLog.isFine("YACY")) serverLog.logFine("YACY", "SaveSeedList: Trying to download seed-file '" + seedURL + "'.");
final ArrayList<String> check = downloadSeedFile(seedURL);
// Comparing if local copy and uploaded copy are equal
@ -882,14 +882,14 @@ public final class yacySeedDB implements httpdAlternativeDomainNames {
private String checkCache(final ArrayList<String> uv, final ArrayList<String> check) {
if ((check == null) || (uv == null) || (uv.size() != check.size())) {
serverLog.logFine("YACY","SaveSeedList: Local and uploades seed-list " +
if (serverLog.isFine("YACY")) serverLog.logFine("YACY", "SaveSeedList: Local and uploades seed-list " +
"contains varying numbers of entries." +
"\n\tLocal seed-list: " + ((uv == null) ? "null" : Integer.toString(uv.size())) + " entries" +
"\n\tRemote seed-list: " + ((check == null) ? "null" : Integer.toString(check.size())) + " enties");
return "Entry count is different: uv.size() = " + ((uv == null) ? "null" : Integer.toString(uv.size())) + ", check = " + ((check == null) ? "null" : Integer.toString(check.size()));
}
serverLog.logFine("YACY","SaveSeedList: Comparing local and uploades seed-list entries ...");
if (serverLog.isFine("YACY")) serverLog.logFine("YACY", "SaveSeedList: Comparing local and uploades seed-list entries ...");
int i;
for (i = 0; i < uv.size(); i++) {
if (!((uv.get(i)).equals(check.get(i)))) return "Element at position " + i + " is different.";

Loading…
Cancel
Save