Some corrections...

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@409 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
rramthun 20 years ago
parent 9f505af7aa
commit 0f11399d16

@ -84,18 +84,18 @@ public class transferURL {
for (int i = 0; i < urlc; i++) {
urls = (String) post.get("url" + i);
if (urls == null) {
yacyCore.log.logDebug("transferURL: got null url-String from peer " + youare);
yacyCore.log.logDebug("transferURL: got null URL-string from peer " + youare);
} else {
lEntry = switchboard.urlPool.loadedURL.newEntry(urls, true);
if ((lEntry != null) && (blockBlacklist)) {
if (switchboard.urlBlacklist.isListed(lEntry.url().getHost().toLowerCase(), lEntry.url().getPath())) {
yacyCore.log.logDebug("transferURL: blocked blacklisted url '" + lEntry.url() + "' from peer " + youare);
yacyCore.log.logDebug("transferURL: blocked blacklisted URL '" + lEntry.url() + "' from peer " + youare);
lEntry = null;
}
}
if (lEntry != null) {
switchboard.urlPool.loadedURL.addEntry(lEntry, iam, iam, 3);
yacyCore.log.logDebug("transferURL: received url '" + lEntry.url() + "' from peer " + youare);
yacyCore.log.logDebug("transferURL: received URL '" + lEntry.url() + "' from peer " + youare);
received++;
}
}
@ -106,8 +106,8 @@ public class transferURL {
// return rewrite properties
int more = switchboard.urlPool.loadedURL.size() - sizeBefore;
doublevalues = "" + (received - more);
switchboard.getLog().logInfo("Received " + received + " URL's from peer " + iam);
if ((received - more) > 0) switchboard.getLog().logError("Received " + doublevalues + " double URL's from peer " + iam);
switchboard.getLog().logInfo("Received " + received + " URLs from peer " + iam);
if ((received - more) > 0) switchboard.getLog().logError("Received " + doublevalues + " double URLs from peer " + iam);
result = "ok";
} else {
result = "error_not_granted";

@ -227,7 +227,7 @@ public final class plasmaCrawlWorker extends Thread {
// trying to close all still open httpc-Sockets first
int closedSockets = httpc.closeOpenSockets(this);
if (closedSockets > 0) {
this.log.logInfo(closedSockets + " http-client sockets of thread '" + this.getName() + "' closed.");
this.log.logInfo(closedSockets + " HTTP-client sockets of thread '" + this.getName() + "' closed.");
}
} catch (Exception e) {}
}
@ -416,7 +416,7 @@ public final class plasmaCrawlWorker extends Thread {
} else {
// this may happen if the targeted host does not exist or anything with the
// remote server was wrong.
log.logError("CRAWLER LOADER ERROR2 with url=" + url.toString() + ": " + e.toString(),e);
log.logError("CRAWLER LOADER ERROR2 with URL=" + url.toString() + ": " + e.toString(),e);
}
} finally {
if (remote != null) httpc.returnInstance(remote);

@ -281,7 +281,7 @@ public abstract class serverAbstractThread extends Thread implements serverThrea
public void notifyThread() {
if (this.syncObject != null) {
synchronized(this.syncObject) {
if (this.log != null) this.log.logDebug("thread '" + this.getName() + "' has received a notification from thead '" + Thread.currentThread().getName() + "'.");
if (this.log != null) this.log.logDebug("thread '" + this.getName() + "' has received a notification from thread '" + Thread.currentThread().getName() + "'.");
this.syncObject.notifyAll();
}
}

@ -674,7 +674,7 @@ public final class serverCore extends serverAbstractThread implements serverThre
// trying to close all still open httpc-Sockets first
int closedSockets = httpc.closeOpenSockets(this);
if (closedSockets > 0) {
serverCore.this.log.logInfo(closedSockets + " http-client sockets of thread '" + this.getName() + "' closed.");
serverCore.this.log.logInfo(closedSockets + " HTTP-client sockets of thread '" + this.getName() + "' closed.");
}
// closing the socket to the client
@ -972,7 +972,7 @@ public final class serverCore extends serverAbstractThread implements serverThre
}
} // end of while
} catch (java.lang.ClassNotFoundException e) {
System.out.println("Internal Error: wrapper class not found: " + e.getMessage());
System.out.println("Internal error: Wrapper class not found: " + e.getMessage());
System.exit(0);
} catch (java.io.IOException e) {
// connection interruption: more or less normal

@ -56,7 +56,7 @@ htLocaleSelection=default
# for example http://<fileHost>/ shall access the file servlet and
# return the defaultFile at rootPath
# either way, http://<fileHost>/ denotes the same as http://localhost:<port>/
# for the preconfigured value 'localpeer', the url is:
# for the preconfigured value 'localpeer', the URL is:
# http://localpeer/
fileHost = localpeer
@ -117,12 +117,12 @@ listsPath=DATA/LISTS
# the path to the SKINS files.
skinPath=DATA/SKINS
# the yellow-list; url's elements
# (the core of an url; like 'yahoo' in 'de.yahoo.com')
# the yellow-list; URL elements
# (the core of a URL; like 'yahoo' in 'de.yahoo.com')
# appearing in this list will not get a manipulated user agent string
proxyYellowList=yacy.yellow
# the black-list; url's appearing in this list will not be loaded;
# the black-list; URLs appearing in this list will not be loaded;
# instead always a 404 is returned
# all these files will be placed in the listsPath
proxyBlackLists=url.default.black
@ -133,7 +133,7 @@ proxyCookieWhiteList=cookie.default.black
# the blue-list;
# no search result is locally presented that has any word of the bluelist
# in the search words, the url or the url's description
# in the search words, the URL or the URL's description
plasmaBlueList=yacy.blue
# this proxy may in turn again access another proxy
@ -219,7 +219,7 @@ adminAccountBase64MD5=
# This superseed file is then available at two locations:
# - it is included in every distribution and
# - updated through a specific URL-location
# we see the file name and the url of the superseed here:
# we see the file name and the URL of the superseed here:
superseedFile=superseed.txt
superseedLocation=http://www.yacy.net/superseed.txt
@ -337,11 +337,11 @@ defaultLinkReceiveFrequency=30
# prefetch parameters
# the prefetch depth assigns a specific depth to the prefetch mechanism
# prefetch of 0 means no prefetch; a prefetch of 1 means to prefetch all
# embedded url's, but since embedded image links are loaded by the browser
# embedded URLs, but since embedded image links are loaded by the browser
# this means that only embedded anchors are prefetched additionally
# a prefetch of 2 would result in loading of all images and anchor pages
# of all embedded anchors. Be careful with this value, since even a prefetch
# of 2 would result in hundreds of prefetched urls for each single proxy fill.
# of 2 would result in hundreds of prefetched URLs for each single proxy fill.
proxyPrefetchDepth=0
proxyStoreHTCache=true
@ -354,7 +354,7 @@ crawlingDepth=2
localIndexing=true
# Filter for crawling; may be used to restrict a crawl to a specific domain
# URL's are only indexed and further crawled if they match this filter
# URLs are only indexed and further crawled if they match this filter
crawlingFilter=.*
crawlingQ=false
storeHTCache=false

Loading…
Cancel
Save