diff --git a/source/de/anomic/crawler/ImporterManager.java b/source/de/anomic/crawler/ImporterManager.java
index c09dfeed2..2db701c0d 100644
--- a/source/de/anomic/crawler/ImporterManager.java
+++ b/source/de/anomic/crawler/ImporterManager.java
@@ -6,11 +6,14 @@
 import de.anomic.kelondro.util.Log;
 
 public class ImporterManager {
-    public final Vector finishedJobs = new Vector();
-    public final ThreadGroup runningJobs = new ThreadGroup("ImporterThreads");
-    public int currMaxJobNr = 0;
+    public final Vector finishedJobs;
+    public final ThreadGroup runningJobs;
+    public int currMaxJobNr;
     
     public ImporterManager() {
+        this.finishedJobs = new Vector();
+        this.runningJobs = new ThreadGroup("ImporterThreads");
+        this.currMaxJobNr = 0;
     }
     
     public int generateUniqueJobID() {
diff --git a/source/de/anomic/crawler/ResourceObserver.java b/source/de/anomic/crawler/ResourceObserver.java
index 47599cc8f..d98ebf5d9 100644
--- a/source/de/anomic/crawler/ResourceObserver.java
+++ b/source/de/anomic/crawler/ResourceObserver.java
@@ -35,7 +35,7 @@ import de.anomic.tools.diskUsage;
 public final class ResourceObserver {
     
     // Unknown for now
-    //private final static long MIN_FREE_MEMORY = 0;
+    //private static final long MIN_FREE_MEMORY = 0;
     // We are called with the cleanup job every five minutes;
     // the disk usage should be checked with every run
     private static final int CHECK_DISK_USAGE_FREQ = 1;
@@ -47,7 +47,7 @@ public final class ResourceObserver {
     private static final int MEDIUM = 1;
     private static final int HIGH = 2;
     
-    private final Log log = new Log("RESOURCE OBSERVER");
+    private static final Log log = new Log("RESOURCE OBSERVER");
     private final plasmaSwitchboard sb;
     
     private int checkDiskUsageCount;
@@ -62,7 +62,7 @@ public final class ResourceObserver {
      */
     public ResourceObserver(final plasmaSwitchboard sb) {
         this.sb = sb;
-        this.log.logInfo("initializing the resource observer");
+        log.logInfo("initializing the resource observer");
         
         final ArrayList pathsToCheck = new ArrayList();
         // FIXME whats about the secondary path???
@@ -84,7 +84,7 @@ public final class ResourceObserver {
         diskUsage.init(pathsToCheck);
         
         if (!diskUsage.isUsable())
-            this.log.logWarning("Disk usage returned: " + diskUsage.getErrorMessage());
+            log.logWarning("Disk usage returned: " + diskUsage.getErrorMessage());
         
         checkDiskUsageCount = 0;
         checkMemoryUsageCount = 0;
@@ -113,24 +113,24 @@ public final class ResourceObserver {
         if (tmpDisksFree < HIGH || tmpMemoryFree < HIGH) {
             if (!sb.crawlJobIsPaused(plasmaSwitchboardConstants.CRAWLJOB_LOCAL_CRAWL)) {
-                this.log.logInfo("pausing local crawls");
+                log.logInfo("pausing local crawls");
                 sb.pauseCrawlJob(plasmaSwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
             }
             
             if (!sb.crawlJobIsPaused(plasmaSwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL)) {
-                this.log.logInfo("pausing remote triggered crawls");
+                log.logInfo("pausing remote triggered crawls");
                 sb.pauseCrawlJob(plasmaSwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
             }
             
             if (tmpDisksFree == LOW && sb.getConfigBool(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, false)) {
-                this.log.logInfo("disabling index receive");
+                log.logInfo("disabling index receive");
                 sb.setConfig(plasmaSwitchboardConstants.INDEX_RECEIVE_ALLOW, false);
                 sb.webIndex.seedDB.mySeed().setFlagAcceptRemoteIndex(false);
             }
         }
         else {
             if (diskUsage.isUsable())
-                this.log.logInfo("run completed; everything in order");
+                log.logInfo("run completed; everything in order");
             else
-                this.log.logInfo("The observer is out of order: " + diskUsage.getErrorMessage());
+                log.logInfo("The observer is out of order: " + diskUsage.getErrorMessage());
         }
     }
@@ -173,9 +173,9 @@ public final class ResourceObserver {
         long[] val;
         for (final Map.Entry entry: usage.entrySet()) {
             val = entry.getValue();
-            this.log.logInfo("df of Volume " + entry.getKey() + ": " + (val[1] / 1024 / 1024) + " MB");
+            log.logInfo("df of Volume " + entry.getKey() + ": " + (val[1] / 1024 / 1024) + " MB");
             if (val[1] < getMinFreeDiskSpace()) {
-                this.log.logWarning("Volume " + entry.getKey() + ": free space (" + (val[1] / 1024 / 1024) + " MB) is too low (< " + (getMinFreeDiskSpace() / 1024 / 1024) + " MB)");
+                log.logWarning("Volume " + entry.getKey() + ": free space (" + (val[1] / 1024 / 1024) + " MB) is too low (< " + (getMinFreeDiskSpace() / 1024 / 1024) + " MB)");
                 ret = MEDIUM;
             }
             if (val[1] < Math.min(getMinFreeDiskSpace() / 5L, 100L)) {
diff --git a/source/de/anomic/crawler/ZURL.java b/source/de/anomic/crawler/ZURL.java
index 18e213d80..1125102e5 100755
--- a/source/de/anomic/crawler/ZURL.java
+++ b/source/de/anomic/crawler/ZURL.java
@@ -57,8 +57,8 @@ public class ZURL {
             0);
     
     // the class object
-    ObjectIndex urlIndex = null;
-    private final LinkedList stack = new LinkedList(); // strings: url
+    private final ObjectIndex urlIndex;
+    private final LinkedList stack;
     
     public ZURL(final File cachePath, final String tablename, final boolean startWithEmptyFile) {
         // creates a new ZURL in a file
@@ -69,13 +69,15 @@ public class ZURL {
                 if (f.isDirectory()) FlexWidthArray.delete(cachePath, tablename); else f.delete();
             }
         }
-        urlIndex = new EcoTable(f, rowdef, EcoTable.tailCacheDenyUsage, EcoFSBufferSize, 0);
+        this.urlIndex = new EcoTable(f, rowdef, EcoTable.tailCacheDenyUsage, EcoFSBufferSize, 0);
         //urlIndex = new kelondroFlexTable(cachePath, tablename, -1, rowdef, 0, true);
+        this.stack = new LinkedList();
     }
     
     public ZURL() {
         // creates a new ZUR in RAM
-        urlIndex = new RowSet(rowdef, 0);
+        this.urlIndex = new RowSet(rowdef, 0);
+        this.stack = new LinkedList();
     }
     
     public int size() {
@@ -83,15 +85,13 @@
     }
     
     public void clear() throws IOException {
-        urlIndex.clear();
-        stack.clear();
+        if (urlIndex != null) urlIndex.clear();
+        if (stack != null) stack.clear();
     }
     
     public void close() {
-        if (urlIndex != null) {
-            urlIndex.close();
-            urlIndex = null;
-        }
+        try {this.clear();} catch (IOException e) {}
+        if (urlIndex != null) urlIndex.close();
     }
     
     public synchronized Entry newEntry(
diff --git a/source/de/anomic/data/SitemapParser.java b/source/de/anomic/data/SitemapParser.java
index 617e8c0fc..1224cca0e 100644
--- a/source/de/anomic/data/SitemapParser.java
+++ b/source/de/anomic/data/SitemapParser.java
@@ -114,7 +114,7 @@ public class SitemapParser extends DefaultHandler {
     /**
      * the logger
      */
-    private final Log logger = new Log("SITEMAP");
+    private static final Log logger = new Log("SITEMAP");
    
    /**
     * The location of the sitemap file
@@ -158,7 +158,7 @@ public class SitemapParser extends DefaultHandler {
        try {
            res = client.GET(siteMapURL.toString());
            if (res.getStatusCode() != 200) {
-                this.logger.logWarning("Unable to download the sitemap file " + this.siteMapURL +
+                logger.logWarning("Unable to download the sitemap file " + this.siteMapURL +
                        "\nServer returned status: " + res.getStatusLine());
                return;
            }
@@ -171,13 +171,13 @@ public class SitemapParser extends DefaultHandler {
            InputStream contentStream = res.getDataAsStream();
            if ((contentMimeType != null) &&
                    (contentMimeType.equals("application/x-gzip") || contentMimeType.equals("application/gzip"))) {
-                if (this.logger.isFine()) this.logger.logFine("Sitemap file has mimetype " + contentMimeType);
+                if (logger.isFine()) logger.logFine("Sitemap file has mimetype " + contentMimeType);
                contentStream = new GZIPInputStream(contentStream);
            }
            
            final httpdByteCountInputStream counterStream = new httpdByteCountInputStream(contentStream, null);
            // parse it
-            this.logger.logInfo("Start parsing sitemap file " + this.siteMapURL + "\n\tMimeType: " + contentMimeType +
+            logger.logInfo("Start parsing sitemap file " + this.siteMapURL + "\n\tMimeType: " + contentMimeType +
                    "\n\tLength: " + this.contentLength);
            final SAXParser saxParser = SAXParserFactory.newInstance().newSAXParser();
            saxParser.parse(counterStream, this);
@@ -186,7 +186,7 @@
                res.closeStream();
            }
        } catch (final Exception e) {
-            this.logger.logWarning("Unable to parse sitemap file " + this.siteMapURL, e);
+            logger.logWarning("Unable to parse sitemap file " + this.siteMapURL, e);
        } finally {
            if (res != null) {
                // release connection
@@ -283,7 +283,7 @@ public class SitemapParser extends DefaultHandler {
                    0,
                    0
            ));
-            this.logger.logInfo("New URL '" + this.nextURL + "' added for crawling.");
+            logger.logInfo("New URL '" + this.nextURL + "' added for crawling.");
            this.urlCounter++;
        }
    }
@@ -293,7 +293,7 @@
            // TODO: we need to decode the URL here
            this.nextURL = (new String(buf, offset, len)).trim();
            if (!this.nextURL.startsWith("http") && !this.nextURL.startsWith("https")) {
-                this.logger.logInfo("The url '" + this.nextURL + "' has a wrong format. Ignore it.");
+                logger.logInfo("The url '" + this.nextURL + "' has a wrong format. Ignore it.");
                this.nextURL = null;
            }
        } else if (this.currentElement.equalsIgnoreCase(SITEMAP_URL_LASTMOD)) {
@@ -301,7 +301,7 @@
            try {
                this.lastMod = DateFormatter.parseISO8601(dateStr);
            } catch (final ParseException e) {
-                this.logger.logInfo("Unable to parse datestring '" + dateStr + "'");
+                logger.logInfo("Unable to parse datestring '" + dateStr + "'");
            }
        }
    }
diff --git a/source/de/anomic/data/diff.java b/source/de/anomic/data/diff.java
index 9844a8370..83ae0ce0b 100644
--- a/source/de/anomic/data/diff.java
+++ b/source/de/anomic/data/diff.java
@@ -38,8 +38,8 @@ import de.anomic.htmlFilter.htmlFilterCharacterCoding;
 
 public class diff {
    private final ArrayList parts = new ArrayList();
-    final Object[] o;
-    final Object[] n;
+    private final Object[] o;
+    private final Object[] n;
    
    /**
     * @param o the original String
diff --git a/source/de/anomic/htmlFilter/htmlFilterContentScraper.java b/source/de/anomic/htmlFilter/htmlFilterContentScraper.java
index 7a739ad03..751c3ef3f 100644
--- a/source/de/anomic/htmlFilter/htmlFilterContentScraper.java
+++ b/source/de/anomic/htmlFilter/htmlFilterContentScraper.java
@@ -85,7 +85,7 @@ public class htmlFilterContentScraper extends htmlFilterAbstractScraper implemen
    //private String headline;
    private List[] headlines;
    private serverCharBuffer content;
-    private final EventListenerList htmlFilterEventListeners = new EventListenerList();
+    private final EventListenerList htmlFilterEventListeners;
    
    /**
     * {@link yacyURL} to the favicon that belongs to the document
@@ -110,6 +110,7 @@ public class htmlFilterContentScraper extends htmlFilterAbstractScraper implemen
        this.headlines = new ArrayList[4];
        for (int i = 0; i < 4; i++) headlines[i] = new ArrayList();
        this.content = new serverCharBuffer(1024);
+        this.htmlFilterEventListeners = new EventListenerList();
    }
    
    public final static boolean punctuation(final char c) {
@@ -463,7 +464,7 @@
        }
    }
    
-    void fireScrapeTag0(final String tagname, final Properties tagopts) {
+    private void fireScrapeTag0(final String tagname, final Properties tagopts) {
        final Object[] listeners = this.htmlFilterEventListeners.getListenerList();
        for (int i=0; i<listeners.length; i+=2) {
            if (listeners[i]==ScraperListener.class) {
diff --git a/source/de/anomic/kelondro/index/BytesIntMap.java b/source/de/anomic/kelondro/index/BytesIntMap.java
--- a/source/de/anomic/kelondro/index/BytesIntMap.java
+++ b/source/de/anomic/kelondro/index/BytesIntMap.java
@@ -185,11 +185,11 @@ public class BytesIntMap {
            this.l = l;
        }
    }
+    private static final entry poisonEntry = new entry(new byte[0], 0);
    
    public static class initDataConsumer implements Callable<BytesIntMap> {
        
        private BlockingQueue<entry> cache;
-        private final entry poison = new entry(new byte[0], 0);
        private BytesIntMap map;
        private Future<BytesIntMap> result;
        private boolean sortAtEnd;
@@ -200,7 +200,7 @@ public class BytesIntMap {
        public void finish(boolean sortAtEnd) {
            this.sortAtEnd = sortAtEnd;
            try {
-                cache.put(poison);
+                cache.put(poisonEntry);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
@@ -221,7 +221,7 @@
        public BytesIntMap call() throws IOException {
            try {
                entry c;
-                while ((c = cache.take()) != poison) {
+                while ((c = cache.take()) != poisonEntry) {
                    map.addi(c.key, c.l);
                }
            } catch (InterruptedException e) {
diff --git a/source/de/anomic/kelondro/index/BytesLongMap.java b/source/de/anomic/kelondro/index/BytesLongMap.java
index 2f615551a..9722c6347 100644
--- a/source/de/anomic/kelondro/index/BytesLongMap.java
+++ b/source/de/anomic/kelondro/index/BytesLongMap.java
@@ -214,11 +214,11 @@ public class BytesLongMap {
            this.l = l;
        }
    }
+    private static final entry poisonEntry = new entry(new byte[0], 0);
    
    public static class initDataConsumer implements Callable<BytesLongMap> {
        
        private BlockingQueue<entry> cache;
-        private final entry poison = new entry(new byte[0], 0);
        private BytesLongMap map;
        private Future<BytesLongMap> result;
        
@@ -250,7 +250,7 @@ public class BytesLongMap {
         */
        public void finish() {
            try {
-                cache.put(poison);
+                cache.put(poisonEntry);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
@@ -271,7 +271,7 @@ public class BytesLongMap {
        public BytesLongMap call() throws IOException {
            try {
                entry c;
-                while ((c = cache.take()) != poison) {
+                while ((c = cache.take()) != poisonEntry) {
                    map.addl(c.key, c.l);
                }
            } catch (InterruptedException e) {
diff --git a/source/de/anomic/kelondro/table/SQLTable.java b/source/de/anomic/kelondro/table/SQLTable.java
index 1d103657c..362adeaf2 100644
--- a/source/de/anomic/kelondro/table/SQLTable.java
+++ b/source/de/anomic/kelondro/table/SQLTable.java
@@ -67,7 +67,7 @@
    private static final String db_pwd_str = "yacy";
    
    private Connection theDBConnection = null;
-    private final ByteOrder order = new NaturalOrder(true);
+    private static final ByteOrder order = new NaturalOrder(true);
    private final Row rowdef;
    
    public SQLTable(final String dbType, final Row rowdef) throws Exception {
@@ -274,7 +274,7 @@ public class SQLTable implements ObjectIndex {
    }
    
    public ByteOrder order() {
-        return this.order;
+        return order;
    }
    
    public int primarykey() {
diff --git a/source/de/anomic/plasma/plasmaParser.java b/source/de/anomic/plasma/plasmaParser.java
index 419cf408d..1220bc536 100644
--- a/source/de/anomic/plasma/plasmaParser.java
+++ b/source/de/anomic/plasma/plasmaParser.java
@@ -173,10 +173,10 @@ public final class plasmaParser {
        loadAvailableParserList();
    }
    
-    private final Log theLogger = new Log("PARSER");
+    private static final Log theLogger = new Log("PARSER");
    
    public Log getLogger() {
-        return this.theLogger;
+        return theLogger;
    }
    
    public static HashMap getParserConfigList() {
@@ -539,13 +539,13 @@
        throws InterruptedException, ParserException {
        ByteArrayInputStream byteIn = null;
        try {
-            if (this.theLogger.isFine())
-                this.theLogger.logFine("Parsing '" + location + "' from byte-array");
+            if (theLogger.isFine())
+                theLogger.logFine("Parsing '" + location + "' from byte-array");
            
            // testing if the resource is not empty
            if (sourceArray == null || sourceArray.length == 0) {
                final String errorMsg = "No resource content available (1) " + ((sourceArray == null) ? "source == null" : "source.length() == 0");
-                this.theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
+                theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
                throw new ParserException(errorMsg,location, errorMsg);
            }
            
@@ -561,7 +561,7 @@
            if (e instanceof ParserException) throw (ParserException) e;
            
            // log unexpected error
-            this.theLogger.logSevere("Unexpected exception in parseSource from byte-array: " + e.getMessage(), e);
+            theLogger.logSevere("Unexpected exception in parseSource from byte-array: " + e.getMessage(), e);
            throw new ParserException("Unexpected exception while parsing " + location,location, e);
        } finally {
            if (byteIn != null) try { byteIn.close(); } catch (final Exception ex){/* ignore this */}
@@ -573,13 +573,13 @@
        
        BufferedInputStream sourceStream = null;
        try {
-            if (this.theLogger.isFine())
-                this.theLogger.logFine("Parsing '" + location + "' from file");
+            if (theLogger.isFine())
+                theLogger.logFine("Parsing '" + location + "' from file");
            
            // testing if the resource is not empty
            if (!(sourceFile.exists() && sourceFile.canRead() && sourceFile.length() > 0)) {
                final String errorMsg = sourceFile.exists() ? "Empty resource file." : "No resource content available (2).";
-                this.theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
+                theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
                throw new ParserException(errorMsg,location, "document has no content");
            }
            
@@ -595,7 +595,7 @@
            if (e instanceof ParserException) throw (ParserException) e;
            
            // log unexpected error
-            this.theLogger.logSevere("Unexpected exception in parseSource from File: " + e.getMessage(), e);
+            theLogger.logSevere("Unexpected exception in parseSource from File: " + e.getMessage(), e);
            throw new ParserException("Unexpected exception while parsing " + location,location, e);
        } finally {
            if (sourceStream != null) try { sourceStream.close(); } catch (final Exception ex){/* ignore this */}
@@ -617,8 +617,8 @@
        Parser theParser = null;
        String mimeType = null;
        try {
-            if (this.theLogger.isFine())
-                this.theLogger.logFine("Parsing '" + location + "' from stream");
+            if (theLogger.isFine())
+                theLogger.logFine("Parsing '" + location + "' from stream");
            
            // getting the mimetype of the document
            mimeType = normalizeMimeType(theMimeType);
@@ -633,12 +633,12 @@
            // testing if parsing is supported for this resource
            if (!plasmaParser.supportedContent(location,mimeType)) {
                final String errorMsg = "No parser available to parse mimetype '" + mimeType + "'";
-                this.theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
+                theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
                throw new ParserException(errorMsg,location, "wrong mime type or wrong extension");
            }
            
-            if (this.theLogger.isFine())
-                this.theLogger.logInfo("Parsing " + location + " with mimeType '" + mimeType +
+            if (theLogger.isFine())
+                theLogger.logInfo("Parsing " + location + " with mimeType '" + mimeType +
                        "' and file extension '" + fileExt + "'.");
            
            // getting the correct parser for the given mimeType
@@ -655,21 +655,21 @@
                doc = parseHtml(location, mimeType, documentCharset, sourceStream);
            } else {
                final String errorMsg = "No parser available to parse mimetype '" + mimeType + "'";
-                this.theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
+                theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
                throw new ParserException(errorMsg,location, "wrong mime type or wrong extension");
            }
            
            // check result
            if (doc == null) {
                final String errorMsg = "Unexpected error. Parser returned null.";
-                this.theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
+                theLogger.logInfo("Unable to parse '" + location + "'. " + errorMsg);
                throw new ParserException(errorMsg,location);
            }
            return doc;
            
        } catch (final UnsupportedEncodingException e) {
            final String errorMsg = "unsupported charset encoding: " + e.getMessage();
-            this.theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg, e);
+            theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg, e);
            throw new ParserException(errorMsg,location, errorMsg);
        } catch (final Exception e) {
            // Interrupted- and Parser-Exceptions should pass through
@@ -678,7 +678,7 @@
            
            // log unexpected error
            final String errorMsg = "Unexpected exception. " + e.getMessage();
-            this.theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg, e);
+            theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg, e);
            throw new ParserException(errorMsg,location,e);
            
        } finally {
@@ -700,7 +700,7 @@
        }
        
        if (!documentCharset.equalsIgnoreCase(charset)) {
-            this.theLogger.logInfo("Charset transformation needed from '" + documentCharset + "' to '" + charset + "' for URL = " + location.toNormalform(true, true));
+            theLogger.logInfo("Charset transformation needed from '" + documentCharset + "' to '" + charset + "' for URL = " + location.toNormalform(true, true));
        }
        
        // parsing the content
@@ -713,7 +713,7 @@
        //hfos.close();
        if (writer.binarySuspect()) {
            final String errorMsg = "Binary data found in resource";
-            this.theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg);
+            theLogger.logSevere("Unable to parse '" + location + "'. " + errorMsg);
            throw new ParserException(errorMsg,location);
        }
        return transformScraper(location, mimeType, documentCharset, scraper);
diff --git a/source/de/anomic/plasma/plasmaWordIndex.java b/source/de/anomic/plasma/plasmaWordIndex.java
index a9f1b46b7..a31863639 100644
--- a/source/de/anomic/plasma/plasmaWordIndex.java
+++ b/source/de/anomic/plasma/plasmaWordIndex.java
@@ -77,6 +77,7 @@ public final class plasmaWordIndex implements indexRI {
    public static final int wCacheMaxChunk = 800; // maximum number of references for each urlhash
    public static final int lowcachedivisor = 900;
    public static final int maxCollectionPartition = 7; // should be 7
+    private static final ByteOrder indexOrder = Base64Order.enhancedCoder;
    
    public static final String CRAWL_PROFILE_PROXY = "proxy";
    
@@ -95,7 +96,6 @@ public final class plasmaWordIndex implements indexRI {
    
    public static final long CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA_RECRAWL_CYCLE = 60L * 24L * 30L;
    
-    private final ByteOrder indexOrder = Base64Order.enhancedCoder;
    private final indexRAMRI indexCache;
    private final indexCollectionRI collections; // new database structure to replace AssortmentCluster and FileCluster
    private final Log log;
diff --git a/source/de/anomic/server/serverCore.java b/source/de/anomic/server/serverCore.java
index 97aa21365..95a4a4a33 100644
--- a/source/de/anomic/server/serverCore.java
+++ b/source/de/anomic/server/serverCore.java
@@ -85,12 +85,14 @@ public final class serverCore extends serverAbstractBusyThread implements server
    /**
     * Line End of HTTP/ICAP headers
     */
-    public static final byte[] CRLF = {CR, LF};
-    public static final String CRLF_STRING = new String(CRLF);
-    public static final String LF_STRING = new String(new byte[]{LF});
-    public static final Class[] stringType = {"".getClass()}; // set up some reflection
-    public static final long startupTime = System.currentTimeMillis();
-    public static final ThreadGroup sessionThreadGroup = new ThreadGroup("sessionThreadGroup");
+    public static final byte[] CRLF = {CR, LF};
+    public static final String CRLF_STRING = new String(CRLF);
+    public static final String LF_STRING = new String(new byte[]{LF});
+    public static final Class[] stringType = {"".getClass()}; // set up some reflection
+    public static final long startupTime = System.currentTimeMillis();
+    public static final ThreadGroup sessionThreadGroup = new ThreadGroup("sessionThreadGroup");
+    private static final HashMap commandObjMethodCache = new HashMap(5);
+    
    /**
     * will be increased with each session and is used to return a hash code
     */
@@ -465,7 +467,6 @@ public final class serverCore extends serverAbstractBusyThread implements server
        
        private long start;                // startup time
        private serverHandler commandObj;
-        private final HashMap commandObjMethodCache = new HashMap(5);
        
        private String request;            // current command line
        private int commandCounter;        // for logging: number of commands in this session
@@ -747,11 +748,11 @@ public final class serverCore extends serverAbstractBusyThread implements server
                this.controlSocket.setSoTimeout(this.socketTimeout);
                
                // exec command and return value
-                Object commandMethod = this.commandObjMethodCache.get(reqProtocol + "_" + reqCmd);
+                Object commandMethod = commandObjMethodCache.get(reqProtocol + "_" + reqCmd);
                if (commandMethod == null) {
                    try {
                        commandMethod = this.commandObj.getClass().getMethod(reqCmd, stringType);
-                        this.commandObjMethodCache.put(reqProtocol + "_" + reqCmd, commandMethod);
+                        commandObjMethodCache.put(reqProtocol + "_" + reqCmd, commandMethod);
                    } catch (final NoSuchMethodException noMethod) {
                        commandMethod = this.commandObj.getClass().getMethod("UNKNOWN", stringType);
                        stringParameter[0] = this.request.trim();
diff --git a/source/de/anomic/urlRedirector/urlRedirectord.java b/source/de/anomic/urlRedirector/urlRedirectord.java
index 20417de6c..97f270700 100644
--- a/source/de/anomic/urlRedirector/urlRedirectord.java
+++ b/source/de/anomic/urlRedirector/urlRedirectord.java
@@ -24,7 +24,7 @@ public class urlRedirectord implements serverHandler, Cloneable {
    
    private serverCore.Session session;
    private static plasmaSwitchboard sb = null;
-    private final Log theLogger = new Log("URL-REDIRECTOR");
+    private static final Log theLogger = new Log("URL-REDIRECTOR");
    private static CrawlProfile.entry profile = null;
    private String nextURL;
    
@@ -150,7 +150,7 @@ public class urlRedirectord implements serverHandler, Cloneable {
                    final int pos = line.indexOf(" ");
                    if (pos != -1) {
                        final String newDepth = line.substring(pos).trim();
-                        this.theLogger.logFine("Changing crawling depth to '" + newDepth + "'.");
+                        theLogger.logFine("Changing crawling depth to '" + newDepth + "'.");
                        sb.webIndex.profilesActiveCrawls.changeEntry(profile, "generalDepth",newDepth);
                    }
                    outputWriter.print("\r\n");
@@ -159,7 +159,7 @@ public class urlRedirectord implements serverHandler, Cloneable {
                    final int pos = line.indexOf(" ");
                    if (pos != -1) {
                        final String newValue = line.substring(pos).trim();
-                        this.theLogger.logFine("Changing crawl dynamic setting to '" + newValue + "'");
+                        theLogger.logFine("Changing crawl dynamic setting to '" + newValue + "'");
                        sb.webIndex.profilesActiveCrawls.changeEntry(profile, "crawlingQ",newValue);
                    }
                    outputWriter.print("\r\n");
@@ -172,7 +172,7 @@ public class urlRedirectord implements serverHandler, Cloneable {
                    final int pos = line.indexOf(" ");
                    this.nextURL = (pos != -1) ? line.substring(0,pos):line;
                    
-                    this.theLogger.logFine("Receiving request " + line);
+                    theLogger.logFine("Receiving request " + line);
                    outputWriter.print("\r\n");
                    outputWriter.flush();
                    
@@ -216,18 +216,18 @@ public class urlRedirectord implements serverHandler, Cloneable {
                    }
                    
                    if (reasonString != null) {
-                        this.theLogger.logFine("URL " + nextURL + " rejected. Reason: " + reasonString);
+                        theLogger.logFine("URL " + nextURL + " rejected. Reason: " + reasonString);
                    }
                    nextURL = null;
                }
            }
            
-            this.theLogger.logFine("Connection terminated");
+            theLogger.logFine("Connection terminated");
            
            // Terminating connection
            return serverCore.TERMINATE_CONNECTION;
        } catch (final Exception e) {
-            this.theLogger.logSevere("Unexpected Error: " + e.getMessage(),e);
+            theLogger.logSevere("Unexpected Error: " + e.getMessage(),e);
            return serverCore.TERMINATE_CONNECTION;
        }
    }
diff --git a/source/de/anomic/xml/opensearchdescriptionReader.java b/source/de/anomic/xml/opensearchdescriptionReader.java
index 0a7f195dc..b23cf2471 100644
--- a/source/de/anomic/xml/opensearchdescriptionReader.java
+++ b/source/de/anomic/xml/opensearchdescriptionReader.java
@@ -97,7 +97,7 @@ public class opensearchdescriptionReader extends DefaultHandler {
    private Item channel;
    private final StringBuilder buffer;
    private boolean parsingChannel;
-    private final String imageURL = null;
+    private final String imageURL;
    private final ArrayList itemsGUID; // a list of GUIDs, so the items can be retrieved by a specific order
    private final HashMap items; // a guid:Item map
    
@@ -108,6 +108,7 @@ public class opensearchdescriptionReader extends DefaultHandler {
        buffer = new StringBuilder();
        channel = null;
        parsingChannel = false;
+        imageURL = null;
    }
    
    public opensearchdescriptionReader(final String path) {
diff --git a/source/de/anomic/yacy/logging/ConsoleOutErrHandler.java b/source/de/anomic/yacy/logging/ConsoleOutErrHandler.java
index c0edfe160..81b6a6385 100644
--- a/source/de/anomic/yacy/logging/ConsoleOutErrHandler.java
+++ b/source/de/anomic/yacy/logging/ConsoleOutErrHandler.java
@@ -39,10 +39,12 @@ public final class ConsoleOutErrHandler extends Handler {
    
    private boolean ignoreCtrlChr = false;
    private Level splitLevel = Level.WARNING;
-    private final Handler stdOutHandler = new ConsoleOutHandler();
-    private final Handler stdErrHandler = new ConsoleHandler();
+    private final Handler stdOutHandler;
+    private final Handler stdErrHandler;
    
    public ConsoleOutErrHandler() {
+        this.stdOutHandler = new ConsoleOutHandler();
+        this.stdErrHandler = new ConsoleHandler();
        this.stdOutHandler.setLevel(Level.FINEST);
        this.stdErrHandler.setLevel(Level.WARNING);
        configure();
diff --git a/source/de/anomic/yacy/yacyURL.java b/source/de/anomic/yacy/yacyURL.java
index 5bfc8cb77..363505904 100644
--- a/source/de/anomic/yacy/yacyURL.java
+++ b/source/de/anomic/yacy/yacyURL.java
@@ -47,7 +47,7 @@ public class yacyURL implements Serializable {
     */
    private static final long serialVersionUID = -1173233022912141884L;
    public static final int TLD_any_zone_filter = 255; // from TLD zones can be filtered during search; this is the catch-all filter
-    private final static Pattern backPathPattern = Pattern.compile("(/[^/]+(?