simplification of the code: removed unused classes, methods and variables

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6154 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 16 years ago
parent 93dfb51fd4
commit 222850414e

@@ -113,7 +113,7 @@ public class BlacklistCleaner_p {
}
// list illegal entries
- final HashMap<String, Integer> ies = getIllegalEntries(blacklistToUse, supportedBlacklistTypes, plasmaSwitchboard.urlBlacklist);
+ final HashMap<String, Integer> ies = getIllegalEntries(blacklistToUse, plasmaSwitchboard.urlBlacklist);
prop.put(RESULTS + "blList", blacklistToUse);
prop.put(RESULTS + "entries", ies.size());
prop.putHTML(RESULTS + "blEngine", plasmaSwitchboard.urlBlacklist.getEngineInfo());
@@ -192,7 +192,7 @@ public class BlacklistCleaner_p {
return r.toArray(new String[r.size()]);
}
- private static HashMap<String, Integer>/* entry, error-code */ getIllegalEntries(final String blacklistToUse, final String[] supportedBlacklistTypes, final Blacklist blEngine) {
+ private static HashMap<String, Integer>/* entry, error-code */ getIllegalEntries(final String blacklistToUse, final Blacklist blEngine) {
final HashMap<String, Integer> r = new HashMap<String, Integer>();
final HashSet<String> ok = new HashSet<String>();

@@ -132,7 +132,6 @@ public class QuickCrawlLink_p {
pe = sb.crawler.profilesActiveCrawls.newEntry(
crawlingStartURL.getHost(),
crawlingStartURL,
- CrawlProfile.KEYWORDS_USER,
crawlingMustMatch,
crawlingMustNotMatch,
CrawlingDepth,

@@ -159,7 +159,7 @@ public class ViewFile {
if (resource == null) {
Document entry = null;
try {
- entry = sb.crawlQueues.loadResourceFromWeb(url, 10000, false, true, false);
+ entry = sb.crawlQueues.loadResourceFromWeb(url, true, false);
} catch (final Exception e) {
prop.put("error", "4");
prop.putHTML("error_errorText", e.getMessage());

@@ -218,7 +218,6 @@ public class WatchCrawler_p {
final CrawlProfile.entry pe = sb.crawler.profilesActiveCrawls.newEntry(
crawlingStartURL.getHost(),
crawlingStartURL,
- CrawlProfile.KEYWORDS_USER,
newcrawlingMustMatch,
newcrawlingMustNotMatch,
newcrawlingdepth,
@@ -338,7 +337,7 @@ public class WatchCrawler_p {
// creating a crawler profile
final yacyURL crawlURL = new yacyURL("file://" + file.toString(), null);
final CrawlProfile.entry profile = sb.crawler.profilesActiveCrawls.newEntry(
- fileName, crawlURL, CrawlProfile.KEYWORDS_USER,
+ fileName, crawlURL,
newcrawlingMustMatch,
CrawlProfile.MATCH_NEVER,
newcrawlingdepth,
@@ -402,7 +401,7 @@ public class WatchCrawler_p {
// create a new profile
final CrawlProfile.entry pe = sb.crawler.profilesActiveCrawls.newEntry(
- sitemapURLStr, sitemapURL, CrawlProfile.KEYWORDS_USER,
+ sitemapURLStr, sitemapURL,
newcrawlingMustMatch,
CrawlProfile.MATCH_NEVER,
newcrawlingdepth,

@@ -201,7 +201,6 @@ public final class search {
0,
filter,
Query.SEARCHDOM_LOCAL,
- null,
-1,
null,
false,
@@ -255,7 +254,6 @@ public final class search {
0,
filter,
Query.SEARCHDOM_LOCAL,
- null,
-1,
constraint,
false,
@@ -269,7 +267,7 @@ public final class search {
RSSFeed.channels(RSSFeed.REMOTESEARCH).addMessage(new RSSMessage("Remote Search Request from " + ((remoteSeed == null) ? "unknown" : remoteSeed.getName()), Query.anonymizedQueryHashes(theQuery.queryHashes), ""));
// make event
- theSearch = plasmaSearchEvent.getEvent(theQuery, rankingProfile, sb.indexSegment, sb.peers, sb.crawlResults, null, true);
+ theSearch = plasmaSearchEvent.getEvent(theQuery, sb.indexSegment, sb.peers, sb.crawlResults, null, true);
// set statistic details of search result and find best result index set
if (theSearch.getRankingResult().getLocalResourceSize() == 0) {

@@ -427,7 +427,6 @@ public class yacysearch {
urlmask,
(clustersearch && globalsearch) ? Query.SEARCHDOM_CLUSTERALL :
((globalsearch) ? Query.SEARCHDOM_GLOBALDHT : Query.SEARCHDOM_LOCAL),
- "",
20,
constraint,
true,
@@ -454,7 +453,7 @@ public class yacysearch {
theQuery.setOffset(0); // in case that this is a new search, always start without a offset
offset = 0;
}
- final plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, ranking, sb.indexSegment, sb.peers, sb.crawlResults, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false);
+ final plasmaSearchEvent theSearch = plasmaSearchEvent.getEvent(theQuery, sb.indexSegment, sb.peers, sb.crawlResults, (sb.isRobinsonMode()) ? sb.clusterhashes : null, false);
// generate result object
//serverLog.logFine("LOCAL_SEARCH", "SEARCH TIME AFTER ORDERING OF SEARCH RESULTS: " + (System.currentTimeMillis() - timestamp) + " ms");

@@ -196,7 +196,7 @@ public class dbtest {
}
if (dbe.equals("kelondroSplitTable")) {
final File tablepath = new File(tablename).getParentFile();
- return new SplitTable(tablepath, new File(tablename).getName(), testRow, true);
+ return new SplitTable(tablepath, new File(tablename).getName(), testRow);
}
if (dbe.equals("kelondroEcoTable")) {
return new Table(new File(tablename), testRow, Table.tailCacheForceUsage, 1000, 0);

@@ -28,7 +28,6 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
- import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
@@ -47,17 +46,6 @@ public class CrawlProfile {
public static final String MATCH_ALL = ".*";
public static final String MATCH_NEVER = "";
- public static final HashSet<String> NO_KEYWORDS = new HashSet<String>(0);
- public static final HashSet<String> KEYWORDS_PROXY = word2set("xproxy");
- public static final HashSet<String> KEYWORDS_REMOTE = word2set("xremote");
- public static final HashSet<String> KEYWORDS_USER = word2set("xuser");
- public static final HashSet<String> KEYWORDS_SNIPPET = word2set("xsnippet");
- private static final HashSet<String> word2set(String word) {
- HashSet<String> s = new HashSet<String>(1);
- s.add(word);
- return s;
- }
static HashMap<String, ConcurrentHashMap<String, DomProfile>> domsCache = new HashMap<String, ConcurrentHashMap<String, DomProfile>>();
@@ -168,7 +156,6 @@ public class CrawlProfile {
public entry newEntry( final String name,
final yacyURL startURL,
- final Set<String> keywords,
final String mustmatch, final String mustnotmatch,
final int generalDepth,
final long recrawlIfOlder /*date*/, final int domFilterDepth, final int domMaxPages,
@@ -180,7 +167,6 @@ public class CrawlProfile {
final entry ne = new entry(
name, startURL,
- keywords,
mustmatch, mustnotmatch,
generalDepth,
recrawlIfOlder, domFilterDepth, domMaxPages,
@@ -284,7 +270,6 @@ public class CrawlProfile {
public entry(final String name, final yacyURL startURL,
- final Set<String> keywords,
final String mustmatch,
final String mustnotmatch,
final int depth,

@@ -505,9 +505,7 @@ public class CrawlQueues {
}
public Document loadResourceFromWeb(
- final yacyURL url,
- final int socketTimeout,
- final boolean keepInMemory,
+ final yacyURL url,
final boolean forText,
final boolean global
) throws IOException {

@@ -183,7 +183,7 @@ public final class CrawlSwitchboard {
if (this.defaultProxyProfile == null) {
// generate new default entry for proxy crawling
- this.defaultProxyProfile = this.profilesActiveCrawls.newEntry("proxy", null, CrawlProfile.KEYWORDS_PROXY, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER,
+ this.defaultProxyProfile = this.profilesActiveCrawls.newEntry("proxy", null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER,
0 /*Integer.parseInt(getConfig(PROXY_PREFETCH_DEPTH, "0"))*/,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_PROXY_RECRAWL_CYCLE), -1, -1, false,
true /*getConfigBool(PROXY_INDEXING_LOCAL_TEXT, true)*/,
@@ -193,32 +193,32 @@ public final class CrawlSwitchboard {
}
if (this.defaultRemoteProfile == null) {
// generate new default entry for remote crawling
- defaultRemoteProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_REMOTE, null, CrawlProfile.KEYWORDS_REMOTE, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultRemoteProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_REMOTE, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
-1, -1, -1, true, true, true, false, true, false, true, true, false);
}
if (this.defaultTextSnippetLocalProfile == null) {
// generate new default entry for snippet fetch and optional crawling
- defaultTextSnippetLocalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_LOCAL_TEXT, null, CrawlProfile.KEYWORDS_SNIPPET, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultTextSnippetLocalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_LOCAL_TEXT, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_SNIPPET_LOCAL_TEXT_RECRAWL_CYCLE), -1, -1, true, false, false, false, false, false, true, true, false);
}
if (this.defaultTextSnippetGlobalProfile == null) {
// generate new default entry for snippet fetch and optional crawling
- defaultTextSnippetGlobalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_GLOBAL_TEXT, null, CrawlProfile.KEYWORDS_SNIPPET, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultTextSnippetGlobalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_GLOBAL_TEXT, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_SNIPPET_GLOBAL_TEXT_RECRAWL_CYCLE), -1, -1, true, true, true, true, true, false, true, true, false);
}
if (this.defaultMediaSnippetLocalProfile == null) {
// generate new default entry for snippet fetch and optional crawling
- defaultMediaSnippetLocalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_LOCAL_MEDIA, null, CrawlProfile.KEYWORDS_SNIPPET, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultMediaSnippetLocalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_LOCAL_MEDIA, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_SNIPPET_LOCAL_MEDIA_RECRAWL_CYCLE), -1, -1, true, false, false, false, false, false, true, true, false);
}
if (this.defaultMediaSnippetGlobalProfile == null) {
// generate new default entry for snippet fetch and optional crawling
- defaultMediaSnippetGlobalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA, null, CrawlProfile.KEYWORDS_SNIPPET, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultMediaSnippetGlobalProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA_RECRAWL_CYCLE), -1, -1, true, false, true, true, true, false, true, true, false);
}
if (this.defaultSurrogateProfile == null) {
// generate new default entry for surrogate parsing
- defaultSurrogateProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SURROGATE, null, CrawlProfile.KEYWORDS_SNIPPET, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
+ defaultSurrogateProfile = this.profilesActiveCrawls.newEntry(CRAWL_PROFILE_SURROGATE, null, CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER, 0,
this.profilesActiveCrawls.getRecrawlDate(CRAWL_PROFILE_SURROGATE_RECRAWL_CYCLE), -1, -1, true, true, false, false, false, false, true, true, false);
}
}

@@ -308,7 +308,7 @@ public class SitemapParser extends DefaultHandler {
private CrawlProfile.entry createProfile(final String domainName, final yacyURL sitemapURL) {
return this.sb.crawler.profilesActiveCrawls.newEntry(
- domainName, sitemapURL, CrawlProfile.KEYWORDS_USER,
+ domainName, sitemapURL,
// crawling Filter
CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER,
// Depth

@@ -254,7 +254,7 @@ public class bookmarksDB {
// stack url
sb.crawler.profilesPassiveCrawls.removeEntry(crawlingStartURL.hash()); // if there is an old entry, delete it
CrawlProfile.entry pe = sb.crawler.profilesActiveCrawls.newEntry(
- folder+"/"+crawlingStartURL, crawlingStartURL, CrawlProfile.KEYWORDS_USER,
+ folder+"/"+crawlingStartURL, crawlingStartURL,
newcrawlingMustMatch,
CrawlProfile.MATCH_NEVER,
newcrawlingdepth,

@@ -145,9 +145,9 @@ public final class userDB {
return null;
}
public Entry getUser(final httpRequestHeader header){
- return getUser(header.get(httpRequestHeader.AUTHORIZATION), header.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP), header.getHeaderCookies());
+ return getUser(header.get(httpRequestHeader.AUTHORIZATION), header.getHeaderCookies());
}
- public Entry getUser(final String auth, final String ip, final String cookies){
+ public Entry getUser(final String auth, final String cookies){
Entry entry=null;
if(auth != null)
entry=proxyAuth(auth);
@@ -161,8 +161,8 @@
* @param auth
* the http-headerline for authorisation
*/
- public boolean hasAdminRight(final String auth, final String ip, final String cookies) {
- final Entry entry = getUser(auth, ip, cookies);
+ public boolean hasAdminRight(final String auth, final String cookies) {
+ final Entry entry = getUser(auth, cookies);
if (entry != null)
return entry.hasAdminRight();
// else if(entry != null && cookieAdminAuth(cookies))

@@ -46,7 +46,7 @@ public class knwikiParser implements wikiParser {
private String[] BEs;
private final plasmaSwitchboard sb;
- public knwikiParser(final plasmaSwitchboard sb) {
+ private knwikiParser(final plasmaSwitchboard sb) {
this.sb = sb;
}
@@ -96,33 +96,16 @@ public class knwikiParser implements wikiParser {
return parse(content, null);
}
- public String transform(final String content, final plasmaSwitchboard sb) {
- return parse(content, null);
- }
- public String transform(final byte[] content) throws UnsupportedEncodingException {
- return parse(new String(content, "UTF-8"), null);
- }
public String transform(
- final byte[] content, final String encoding,
- final plasmaSwitchboard switchboard) throws UnsupportedEncodingException {
- return parse(new String(content, encoding), null);
- }
- public String transform(final byte[] content, final String encoding) throws UnsupportedEncodingException {
+ final byte[] content, final String encoding) throws UnsupportedEncodingException {
return parse(new String(content, encoding), null);
}
- public String transform(final byte[] text, final String encoding, final String publicAddress) throws UnsupportedEncodingException {
- return parse(new String(text, encoding), publicAddress);
- }
- public String transform(final String text, final String publicAddress) {
- return parse(text, publicAddress);
- }
- public String parse(String text, final String publicAddress) {
+ private String parse(String text, final String publicAddress) {
tokens = new Token[] {
new SimpleToken('=', '=', new String[][] { null, { "h2" }, { "h3" }, { "h4" } }, true),
new SimpleToken('\'', '\'', new String[][] { null, { "i" }, { "b" }, null, { "b", "i" } }, false),
@@ -159,7 +142,7 @@ public class knwikiParser implements wikiParser {
return Text.mergeTexts(tt);
}
- public String parseUnescaped(String text) {
+ private String parseUnescaped(String text) {
Token st;
Matcher m;
StringBuffer sb;

@@ -49,7 +49,7 @@ public class httpSSI {
} else {
out.write(in.getBytes(off, p - off));
}
- parseSSI(in, p, q + 3 - p, out, authorization, requesthost);
+ parseSSI(in, p, out, authorization, requesthost);
off = q + 3;
p = in.indexOf("<!--#".getBytes(), off);
}
@@ -60,7 +60,7 @@ public class httpSSI {
}
}
- private static void parseSSI(final ByteBuffer in, final int off, final int len, final OutputStream out, final String authorization, final String requesthost) {
+ private static void parseSSI(final ByteBuffer in, final int off, final OutputStream out, final String authorization, final String requesthost) {
if (in.startsWith("<!--#include virtual=\"".getBytes(), off)) {
final int q = in.indexOf("\"".getBytes(), off + 22);
if (q > 0) {

@@ -510,8 +510,6 @@ public final class httpd implements serverHandler, Cloneable {
} catch (final Exception e) {
logUnexpectedError(e);
return serverCore.TERMINATE_CONNECTION;
- } finally {
- this.doUserAccounting(this.prop);
}
}
@@ -582,8 +580,6 @@ public final class httpd implements serverHandler, Cloneable {
} catch (final Exception e) {
logUnexpectedError(e);
return serverCore.TERMINATE_CONNECTION;
- } finally {
- this.doUserAccounting(this.prop);
}
}
@@ -652,8 +648,6 @@ public final class httpd implements serverHandler, Cloneable {
} catch (final Exception e) {
logUnexpectedError(e);
return serverCore.TERMINATE_CONNECTION;
- } finally {
- this.doUserAccounting(this.prop);
}
}
@@ -1434,14 +1428,6 @@ public final class httpd implements serverHandler, Cloneable {
return !disallowZippedContentEncoding.contains(path.substring(pos).toLowerCase());
}
return true;
}
- public void doUserAccounting(final Properties conProps) {
- // TODO: validation of conprop fields
- // httpHeader.CONNECTION_PROP_USER
- // httpHeader.CONNECTION_PROP_CLIENTIP
- // httpHeader.CONNECTION_PROP_PROXY_RESPOND_SIZE
- // httpHeader.CONNECTION_PROP_PROXY_RESPOND_STATUS
- }
public static boolean isThisSeedIP(final String hostName) {

@@ -310,7 +310,7 @@ public final class httpdFileHandler {
return;
} else if (
(httpd.staticAdminAuthenticated(authorization.trim().substring(6), switchboard) == 4) ||
- (sb.userDB.hasAdminRight(authorization, conProp.getProperty(httpHeader.CONNECTION_PROP_CLIENTIP), requestHeader.getHeaderCookies()))) {
+ (sb.userDB.hasAdminRight(authorization, requestHeader.getHeaderCookies()))) {
//Authentication successful. remove brute-force flag
serverCore.bfHost.remove(conProp.getProperty(httpHeader.CONNECTION_PROP_CLIENTIP));
} else {

@@ -333,13 +333,6 @@ public final class httpdProxyHandler {
host = host.substring(0, pos);
}
- String ext;
- if ((pos = path.lastIndexOf('.')) < 0) {
- ext = "";
- } else {
- ext = path.substring(pos + 1).toLowerCase();
- }
// check the blacklist
// blacklist idea inspired by [AS]:
// respond a 404 for all AGIS ("all you get is shit") servers
@@ -383,7 +376,7 @@ public final class httpdProxyHandler {
// case 1 and case 3
if (cachedResponseHeader == null) {
if (theLogger.isFinest()) theLogger.logFinest(reqID + " page not in cache: fulfill request from web");
- fulfillRequestFromWeb(conProp, url, ext, requestHeader, cachedResponseHeader, countedRespond);
+ fulfillRequestFromWeb(conProp, url, requestHeader, cachedResponseHeader, countedRespond);
} else {
final Document cacheEntry = new httpdProxyCacheEntry(
0, // crawling depth
@@ -401,10 +394,10 @@ public final class httpdProxyHandler {
byte[] cacheContent = plasmaHTCache.getResourceContent(url);
if (cacheContent != null && cacheEntry.shallUseCacheForProxy()) {
if (theLogger.isFinest()) theLogger.logFinest(reqID + " fulfill request from cache");
- fulfillRequestFromCache(conProp, url, ext, requestHeader, cachedResponseHeader, cacheContent, countedRespond);
+ fulfillRequestFromCache(conProp, url, requestHeader, cachedResponseHeader, cacheContent, countedRespond);
} else {
if (theLogger.isFinest()) theLogger.logFinest(reqID + " fulfill request from web");
- fulfillRequestFromWeb(conProp, url, ext, requestHeader, cachedResponseHeader, countedRespond);
+ fulfillRequestFromWeb(conProp, url, requestHeader, cachedResponseHeader, countedRespond);
}
}
@@ -434,7 +427,7 @@ public final class httpdProxyHandler {
}
}
- private static void fulfillRequestFromWeb(final Properties conProp, final yacyURL url,final String ext, final httpRequestHeader requestHeader, final httpResponseHeader cachedResponseHeader, final OutputStream respond) {
+ private static void fulfillRequestFromWeb(final Properties conProp, final yacyURL url, final httpRequestHeader requestHeader, final httpResponseHeader cachedResponseHeader, final OutputStream respond) {
final GZIPOutputStream gzippedOut = null;
@@ -642,7 +635,6 @@ public final class httpdProxyHandler {
private static void fulfillRequestFromCache(
final Properties conProp,
final yacyURL url,
- final String ext,
final httpRequestHeader requestHeader,
final httpResponseHeader cachedResponseHeader,
final byte[] cacheEntry,

@@ -622,7 +622,7 @@ public class RowCollection implements Iterable<Row.Entry> {
int p = L;
int q = R - 1;
- int pivot = pivot(L, R, S, swapspace);
+ int pivot = pivot(L, R, S);
if (this.rowdef.objectOrder instanceof Base64Order) {
while (p <= q) {
// wenn pivot < S: pivot befindet sich in sortierter Sequenz von L bis S - 1
@@ -677,7 +677,7 @@ public class RowCollection implements Iterable<Row.Entry> {
return p;
}
- private final int pivot(final int L, final int R, final int S, final byte[] swapspace) {
+ private final int pivot(final int L, final int R, final int S) {
if ((S == 0) || (S < L)) {
// the collection has no ordering
// or

@@ -1,145 +0,0 @@
// kelondroBufferedIOChunks.java
// -----------------------
// part of The Kelondro Database
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2005
// created: 11.12.2004
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// this is a WRITE-buffer!
// the buffer MUST be flushed before closing of the underlying kelondroRA
package de.anomic.kelondro.io;
import java.io.IOException;
public final class BufferedIOChunks extends AbstractIOChunks implements IOChunksInterface {
protected RandomAccessInterface ra;
private int bufferSize;
private final long commitTimeout;
private byte[] buffer;
private long lastCommit = 0;
public BufferedIOChunks(final RandomAccessInterface ra, final String name, final int buffersize, final long commitTimeout) {
this.name = name;
this.ra = ra;
this.bufferSize = 0;
this.commitTimeout = commitTimeout;
this.buffer = null; // this is a buffer at the end of the file. It will be initialized if necessary
this.lastCommit = System.currentTimeMillis();
}
public RandomAccessInterface getRA() {
return this.ra;
}
public long length() throws IOException {
return ra.length() + this.bufferSize;
}
public synchronized void readFully(final long pos, final byte[] b, final int off, final int len) throws IOException {
assert (b.length >= off + len): "read pos=" + pos + ", b.length=" + b.length + ", off=" + off + ", len=" + len;
// check commit time
if (this.lastCommit + this.commitTimeout > System.currentTimeMillis()) {
commit();
}
// do the read
if (pos >= this.ra.length()) {
// read from the buffer
if (this.buffer == null) this.buffer = new byte[this.bufferSize];
System.arraycopy(this.buffer, (int) (pos - this.ra.length()), b, off, len);
} else if (pos + len >= this.ra.length()) {
// the content is partly in the file and partly in the buffer
commit();
this.ra.seek(pos);
ra.readFully(b, off, len);
} else {
// read from the file
this.ra.seek(pos);
ra.readFully(b, off, len);
}
}
public synchronized void write(final long pos, final byte[] b, final int off, final int len) throws IOException {
assert (b.length >= off + len): "write pos=" + pos + ", b.length=" + b.length + ", b='" + new String(b) + "', off=" + off + ", len=" + len;
//assert pos <= this.ra.length(): "pos = " + pos + ", this.ra.length() = " + this.ra.length();
if (len == 0) return;
if (pos >= this.ra.length()) {
// the position is fully outside of the file
if (this.buffer != null && pos - this.ra.length() + len > this.buffer.length) {
// this does not fit into the buffer
commit();
this.ra.seek(pos);
this.ra.write(b, off, len);
return;
}
if (this.buffer == null) this.buffer = new byte[this.bufferSize];
assert b != null;
assert off >= 0 : "off = " + off;
assert off + len <= b.length : "off = " + off + ", len = " + len + ", b.length = " + b.length;
assert this.buffer != null;
assert pos - this.ra.length() >= 0 : "pos = " + pos + ", this.ra.length() = " + this.ra.length();
assert pos - this.ra.length() + len <= this.buffer.length : "pos = " + pos + ", this.ra.length() = " + this.ra.length() + ", len = " + len + ", buffer.length = " + buffer.length;
//pos = 1216, this.ra.length() = 1208, len = 386, buffer.length = 0
System.arraycopy(b, off, this.buffer, (int) (pos - this.ra.length()), len); // OOB Exception :-(
this.bufferSize = (int) Math.max(this.bufferSize, pos - this.ra.length() + len);
return;
} else if (pos + len >= this.ra.length()) {
// the content is partly in the file and partly in the buffer
commit();
this.ra.seek(pos);
this.ra.write(b, off, len);
return;
} else {
// the position is fully inside the file
this.ra.seek(pos);
this.ra.write(b, off, len);
return;
}
}
public synchronized void commit() throws IOException {
this.lastCommit = System.currentTimeMillis();
if (this.buffer == null || this.bufferSize == 0) return;
this.ra.seek(this.ra.length()); // move to end of file
this.ra.write(this.buffer, 0, this.bufferSize);
this.bufferSize = 0;
}
public synchronized void close() throws IOException {
if (this.ra != null) {
commit();
this.ra.close();
}
this.ra = null;
}
protected void finalize() throws Throwable {
if (this.ra != null) this.close();
super.finalize();
}
public void deleteOnExit() {
this.ra.deleteOnExit();
}
}

@@ -1,88 +0,0 @@
// kelondroBufferedRA.java
// -----------------------
// part of The Kelondro Database
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2005
// last major change: 13.09.2005
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package de.anomic.kelondro.io;
import java.io.IOException;
import de.anomic.kelondro.util.ByteBuffer;
public class BufferedRandomAccess extends AbstractRandomAccess implements RandomAccessInterface {
private ByteBuffer sbb;
private long pos;
public BufferedRandomAccess() {
sbb = new ByteBuffer();
pos = 0;
}
public BufferedRandomAccess(final ByteBuffer bb) {
sbb = bb;
pos = 0;
}
public ByteBuffer getBuffer() {
return this.sbb;
}
public long available() throws IOException {
return Integer.MAX_VALUE - sbb.length();
}
public void close() throws IOException {
sbb = null;
}
public long length() throws IOException {
return sbb.length();
}
public void setLength(long length) throws IOException {
sbb.resize((int) length);
}
public int read() throws IOException {
return 0xff & sbb.byteAt((int) pos++);
}
public void readFully(final byte[] b, final int off, final int len) throws IOException {
final byte[] g = sbb.getBytes((int) pos, len);
pos += g.length;
System.arraycopy(g, 0, b, off, g.length);
}
public void seek(final long pos) throws IOException {
this.pos = pos;
}
public void write(final int b) throws IOException {
this.sbb.overwrite((int) pos, b);
pos++;
}
public void write(final byte[] b, final int off, final int len) throws IOException {
this.sbb.overwrite((int) pos, b, off, len);
pos += len;
}
}

@@ -220,7 +220,7 @@ public final class NaturalOrder extends AbstractOrder<byte[]> implements ByteOrd
return sb.toString();
}
- public static final String table(final byte[] b, final int linewidth, final int marker) {
+ public static final String table(final byte[] b, final int linewidth) {
if (b == null) return "NULL";
if (b.length == 0) return "[]";
final StringBuilder sb = new StringBuilder(b.length * 4);

@@ -393,7 +393,7 @@ public class Records {
//kelondroRA raf = new kelondroCachedRA(new kelondroFileRA(this.filename), 5000000, 1000);
//kelondroRA raf = new kelondroNIOFileRA(this.filename, (file.length() < 4000000), 10000);
//raf = new kelondroCachedRA(raf);
- initExistingFile(raf, true);
+ initExistingFile(raf);
} else {
this.filename = file.getCanonicalPath();
final RandomAccessInterface raf = (useChannel) ? new ChannelRandomAccess(new File(this.filename)) : new FileRandomAccess(new File(this.filename));
@@ -414,28 +414,6 @@ public class Records {
}
}
- public Records(final RandomAccessInterface ra, final String filename, final boolean useCache,
- final short ohbytec, final short ohhandlec,
- final Row rowdef, final int FHandles, final int txtProps, final int txtPropWidth,
- final boolean exitOnFail) {
- // this always creates a new file
- this.fileExisted = false;
- this.filename = filename;
- this.OHBYTEC = ohbytec;
- this.OHHANDLEC = ohhandlec;
- this.ROW = rowdef; // create row
- this.TXTPROPW = txtPropWidth;
- try {
- initNewFile(ra, FHandles, txtProps);
- } catch (final IOException e) {
- logFailure("cannot create / " + e.getMessage());
- if (exitOnFail) System.exit(-1);
- }
- assignRowdef(rowdef);
- writeOrderType();
- }
public void clear() throws IOException {
RandomAccessInterface ra = this.entryFile.getRA();
final File f = ra.file();
@@ -543,14 +521,7 @@ public class Records {
this.theLogger.fine("KELONDRO DEBUG " + this.filename + ": " + message);
}
- public Records(final RandomAccessInterface ra, final String filename, final boolean useNodeCache) throws IOException{
- this.fileExisted = false;
- this.filename = filename;
- initExistingFile(ra, useNodeCache);
- readOrderType();
- }
- private void initExistingFile(final RandomAccessInterface ra, boolean useBuffer) throws IOException {
+ private void initExistingFile(final RandomAccessInterface ra) throws IOException {
// read from Chunked IO
//useBuffer = false;
/*if (useBuffer) {

@@ -88,9 +88,8 @@ public class SplitTable implements ObjectIndex {
public SplitTable(
final File path,
final String tablename,
- final Row rowdef,
- final boolean resetOnFail) {
- this(path, tablename, rowdef, ArrayStack.oneMonth, (long) Integer.MAX_VALUE, resetOnFail);
+ final Row rowdef) {
+ this(path, tablename, rowdef, ArrayStack.oneMonth, (long) Integer.MAX_VALUE);
}
public SplitTable(
@@ -98,22 +97,21 @@ public class SplitTable implements ObjectIndex {
final String tablename,
final Row rowdef,
final long fileAgeLimit,
- final long fileSizeLimit,
- final boolean resetOnFail) {
+ final long fileSizeLimit) {
this.path = path;
this.prefix = tablename;
this.rowdef = rowdef;
this.fileAgeLimit = fileAgeLimit;
this.fileSizeLimit = fileSizeLimit;
this.entryOrder = new Row.EntryComparator(rowdef.objectOrder);
- init(resetOnFail);
+ init();
}
String newFilename() {
return prefix + "." + DateFormatter.formatShortMilliSecond(new Date()) + ".table";
}
- public void init(final boolean resetOnFail) {
+ public void init() {
current = null;
// init the thread pool for the keeperOf executor service
@@ -212,7 +210,7 @@ public class SplitTable implements ObjectIndex {
if (f.isDirectory()) delete(path, l[i]); else FileUtils.deletedelete(f);
}
}
- init(true);
+ init();
}
public static void delete(final File path, final String tablename) {

@@ -215,7 +215,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
this.ram.references(starttermHash, rot),
new MergeIterator<ReferenceContainer<ReferenceType>>(
this.ram.references(starttermHash, false),
- this.array.wordContainerIterator(starttermHash, false, false),
+ this.array.wordContainerIterator(starttermHash, false),
containerOrder,
ReferenceContainer.containerMergeMethod,
true),
@@ -232,7 +232,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
}
return new MergeIterator<ReferenceContainer<ReferenceType>>(
this.ram.references(startTermHash, false),
- this.array.wordContainerIterator(startTermHash, false, false),
+ this.array.wordContainerIterator(startTermHash, false),
containerOrder,
ReferenceContainer.containerMergeMethod,
true);

@@ -65,7 +65,7 @@ public final class MetadataRepository implements Iterable<byte[]> {
public MetadataRepository(final File path) {
this.location = path;
- this.urlIndexFile = new Cache(new SplitTable(this.location, "urls", URLMetadataRow.rowdef, false));
+ this.urlIndexFile = new Cache(new SplitTable(this.location, "urls", URLMetadataRow.rowdef));
this.exportthread = null; // will have a export thread assigned if exporter is running
this.statsDump = null;
}

@@ -107,7 +107,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
* objects in the cache.
* @throws IOException
*/
- public synchronized CloneableIterator<ReferenceContainer<ReferenceType>> wordContainerIterator(final byte[] startWordHash, final boolean rot, final boolean ram) {
+ public synchronized CloneableIterator<ReferenceContainer<ReferenceType>> wordContainerIterator(final byte[] startWordHash, final boolean rot) {
try {
return new heapCacheIterator(startWordHash, rot);
} catch (IOException e) {

@@ -78,7 +78,7 @@ public class WordReferenceVars extends AbstractReference implements WordReferen
this.hitcount = hitcount;
this.llocal = outlinksSame;
this.lother = outlinksOther;
- this.phrasesintext = outlinksOther;
+ this.phrasesintext = phrasecount;
this.positions = new ArrayList<Integer>(ps.size());
for (int i = 0; i < ps.size(); i++) this.positions.add(ps.get(i));
this.posinphrase = posinphrase;

@@ -123,7 +123,7 @@ public final class Condenser {
Map.Entry<yacyURL, String> entry;
if (indexText) {
- createCondensement(document.getText(), document.getCharset());
+ createCondensement(document.getText());
// the phrase counter:
// phrase 0 are words taken from the URL
// phrase 1 is the MainTitle
@@ -244,17 +244,17 @@ public final class Condenser {
}
}
- public Condenser(final InputStream text, final String charset) throws UnsupportedEncodingException {
- this(text, charset, 3, 2);
+ public Condenser(final InputStream text) throws UnsupportedEncodingException {
+ this(text, 3, 2);
}
- public Condenser(final InputStream text, final String charset, final int wordminsize, final int wordcut) throws UnsupportedEncodingException {
+ public Condenser(final InputStream text, final int wordminsize, final int wordcut) throws UnsupportedEncodingException {
this.wordminsize = wordminsize;
this.wordcut = wordcut;
this.languageIdentificator = null; // we don't need that here
// analysis = new Properties();
words = new TreeMap<String, Word>();
- createCondensement(text, charset);
+ createCondensement(text);
}
public int excludeWords(final TreeSet<String> stopwords) {
@@ -280,7 +280,7 @@ public final class Condenser {
return s;
}
- private void createCondensement(final InputStream is, final String charset) throws UnsupportedEncodingException {
+ private void createCondensement(final InputStream is) throws UnsupportedEncodingException {
final HashSet<String> currsentwords = new HashSet<String>();
StringBuilder sentence = new StringBuilder(100);
String word = "";
@@ -790,7 +790,7 @@ public final class Condenser {
buffer = new ByteArrayInputStream(text.getBytes());
}
try {
- return new Condenser(buffer, "UTF-8", 2, 1).words();
+ return new Condenser(buffer, 2, 1).words();
} catch (final UnsupportedEncodingException e) {
return null;
}

@@ -43,12 +43,12 @@ public class plasmaParserConfig {
* @see #loadEnabledParserList()
* @see #setEnabledParserList(Enumeration)
*/
- final HashSet<String> enabledParserList = new HashSet<String>();
+ public final HashSet<String> enabledParserList = new HashSet<String>();
/**
* A list of file extensions that are supported by all enabled parsers
*/
- final HashSet<String> supportedFileExt = new HashSet<String>();
+ private final HashSet<String> supportedFileExt = new HashSet<String>();
/**
* Parsermode this configuration belongs to
@@ -89,8 +89,7 @@ public class plasmaParserConfig {
}
}
- public boolean supportedFileExt(final yacyURL url) {
+ private boolean supportedFileExt(final yacyURL url) {
if (url == null) throw new NullPointerException();
// getting the file path
@@ -171,7 +170,6 @@ public class plasmaParserConfig {
this.enabledParserList.addAll(newEnabledParsers);
}
synchronized (this.supportedFileExt) {
- this.supportedFileExt.clear();
this.supportedFileExt.addAll(newSupportedFileExt);

@@ -41,7 +41,6 @@ import de.anomic.kelondro.table.Table;
import de.anomic.kelondro.text.IndexCell;
import de.anomic.kelondro.text.ReferenceContainer;
import de.anomic.kelondro.text.Segment;
- import de.anomic.kelondro.text.citationPrototype.CitationReferenceRow;
import de.anomic.kelondro.text.referencePrototype.WordReference;
import de.anomic.kelondro.util.DateFormatter;
import de.anomic.kelondro.util.MemoryControl;
@@ -144,7 +143,7 @@ public class plasmaRankingCRProcess {
return true;
}
- public static boolean accumulate_upd(final File f, final ObjectIndex acc, final IndexCell<CitationReferenceRow> seq) throws IOException {
+ public static boolean accumulate_upd(final File f, final ObjectIndex acc) throws IOException {
// open file
AttrSeq source_cr = null;
try {

@@ -54,7 +54,6 @@ import de.anomic.plasma.parser.Condenser;
import de.anomic.plasma.plasmaSearchRankingProcess.NavigatorEntry;
import de.anomic.plasma.plasmaSnippetCache.MediaSnippet;
import de.anomic.search.Query;
- import de.anomic.search.RankingProfile;
import de.anomic.server.serverProfiling;
import de.anomic.yacy.yacySearch;
import de.anomic.yacy.yacySeed;
@@ -351,7 +350,7 @@ public final class plasmaSearchEvent {
if (query.contentdom == Query.CONTENTDOM_TEXT) {
// attach text snippet
startTime = System.currentTimeMillis();
- final plasmaSnippetCache.TextSnippet snippet = plasmaSnippetCache.retrieveTextSnippet(metadata, snippetFetchWordHashes, (snippetFetchMode == 2), ((query.constraint != null) && (query.constraint.get(Condenser.flag_cat_indexof))), 180, 3000, (snippetFetchMode == 2) ? Integer.MAX_VALUE : 30000, query.isGlobal());
+ final plasmaSnippetCache.TextSnippet snippet = plasmaSnippetCache.retrieveTextSnippet(metadata, snippetFetchWordHashes, (snippetFetchMode == 2), ((query.constraint != null) && (query.constraint.get(Condenser.flag_cat_indexof))), 180, (snippetFetchMode == 2) ? Integer.MAX_VALUE : 30000, query.isGlobal());
final long snippetComputationTime = System.currentTimeMillis() - startTime;
Log.logInfo("SEARCH_EVENT", "text snippet load time for " + metadata.url() + ": " + snippetComputationTime + ", " + ((snippet.getErrorCode() < 11) ? "snippet found" : ("no snippet found (" + snippet.getError() + ")")));
@@ -461,7 +460,6 @@ public final class plasmaSearchEvent {
public static plasmaSearchEvent getEvent(
final Query query,
- final RankingProfile ranking,
final Segment indexSegment,
final yacySeedDB peers,
final ResultURLs crawlResults,

@@ -304,7 +304,7 @@ public class plasmaSnippetCache {
}
@SuppressWarnings("unchecked")
- public static TextSnippet retrieveTextSnippet(final URLMetadataRow.Components comp, final TreeSet<byte[]> queryhashes, final boolean fetchOnline, final boolean pre, final int snippetMaxLength, final int timeout, final int maxDocLen, final boolean reindexing) {
+ public static TextSnippet retrieveTextSnippet(final URLMetadataRow.Components comp, final TreeSet<byte[]> queryhashes, final boolean fetchOnline, final boolean pre, final int snippetMaxLength, final int maxDocLen, final boolean reindexing) {
// heise = "0OQUNU3JSs05"
final yacyURL url = comp.url();
if (queryhashes.size() == 0) {
@@ -354,7 +354,7 @@ public class plasmaSnippetCache {
// if not found try to download it
// download resource using the crawler and keep resource in memory if possible
- final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, timeout, true, true, reindexing);
+ final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, true, reindexing);
// getting resource metadata (e.g. the http headers for http resources)
if (entry != null) {
@@ -466,7 +466,7 @@ public class plasmaSnippetCache {
// if not found try to download it
// download resource using the crawler and keep resource in memory if possible
- final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, timeout, true, forText, global);
+ final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, forText, global);
// getting resource metadata (e.g. the http headers for http resources)
if (entry != null) {
@@ -923,7 +923,7 @@ public class plasmaSnippetCache {
// if the content is not available in cache try to download it from web
// try to download the resource using a crawler
- final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, (socketTimeout < 0) ? -1 : socketTimeout, true, forText, reindexing);
+ final Document entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(url, forText, reindexing);
if (entry == null) return null; // not found in web
// read resource body (if it is there)

@@ -1947,7 +1947,7 @@ public final class plasmaSwitchboard extends serverAbstractSwitch<IndexingStack.
if (accessFromLocalhost && (adminAccountBase64MD5.equals(authorization))) return 3; // soft-authenticated for localhost
// authorization by hit in userDB
- if (userDB.hasAdminRight((String) requestHeader.get(httpRequestHeader.AUTHORIZATION, "xxxxxx"), ((String) requestHeader.get(httpRequestHeader.CONNECTION_PROP_CLIENTIP, "")), requestHeader.getHeaderCookies())) return 4; //return, because 4=max
+ if (userDB.hasAdminRight((String) requestHeader.get(httpRequestHeader.AUTHORIZATION, "xxxxxx"), requestHeader.getHeaderCookies())) return 4; //return, because 4=max
// authorization with admin keyword in configuration
return httpd.staticAdminAuthenticated(authorization, this);

@@ -134,7 +134,7 @@ public final class Query {
final String navigators,
final boolean onlineSnippetFetch,
final int lines, final int offset, final String urlMask,
- final int domType, final String domGroupName, final int domMaxTargets,
+ final int domType, final int domMaxTargets,
final Bitfield constraint, final boolean allofconstraint,
final String site,
final String authorhash,

@@ -58,26 +58,7 @@ public class crypt {
public static final SimpleDateFormat dateFormatter = new SimpleDateFormat("yyyyMMddHHmmssSSS", Locale.ENGLISH);
String cryptMethod; // one of ["TripleDES", "Blowfish", "DESede", "DES"]
- private static final String defaultMethod = "PBEWithMD5AndDES"; //"DES";
- public crypt(final String pbe) {
- // this is possible, but not recommended
- this(pbe, (pbe + "XXXXXXXX").substring(0, 8));
- }
- public crypt(final String pbe, final String salt) {
- this(pbe, salt, defaultMethod);
- }
- private crypt(final String pbe, final String salt, final String method) {
- // a Password-Based Encryption. The SecretKey is created on the fly
- /* salt currently not used
- if (salt.length() > 8) salt = salt.substring(0,8);
- if (salt.length() < 8) salt = (salt + "XXXXXXXX").substring(0,8);
- */
- // Create a cipher and initialize it for encrypting end decrypting
- cryptMethod = method;
- }
+ //private static final String defaultMethod = "PBEWithMD5AndDES"; //"DES";
// --------------------------------------------------------
// Section: simple Codings

@@ -39,8 +39,6 @@ public class urlRedirectord implements serverHandler, Cloneable {
"URL Redirector",
// start URL
null,
- // keywords
- CrawlProfile.KEYWORDS_USER,
// crawling filter
CrawlProfile.MATCH_ALL, CrawlProfile.MATCH_NEVER,
// depth

@@ -38,7 +38,6 @@ import de.anomic.yacy.yacyCore;
import de.anomic.yacy.yacySeed;
import de.anomic.yacy.yacySeedDB;
import de.anomic.yacy.yacyVersion;
- import de.anomic.yacy.dht.PartitionScheme;
import de.anomic.yacy.logging.Log;
@@ -80,7 +79,7 @@ public class PeerSelection {
private static int verifiedOwn = 0;
private static int verifiedNotOwn = 0;
- public static boolean shallBeOwnWord(final yacySeedDB seedDB, final byte[] wordhash, String urlhash, int redundancy, PartitionScheme scheme) {
+ public static boolean shallBeOwnWord(final yacySeedDB seedDB, final byte[] wordhash, String urlhash, int redundancy) {
// the guessIfOwnWord is a fast method that should only fail in case that a 'true' may be incorrect, but a 'false' shall always be correct
if (guessIfOwnWord(seedDB, wordhash, urlhash)) {
// this case must be verified, because it can be wrong.

@@ -195,7 +195,7 @@ public class yacyPeerActions {
final boolean res = connectPeer(peer, direct);
if (res) {
// perform all actions if peer is effective new
- this.processPeerArrival(peer, direct);
+ this.processPeerArrival(peer);
RSSFeed.channels(RSSFeed.PEERNEWS).addMessage(new RSSMessage(peer.getName() + " joined the network", "", ""));
}
return res;
@@ -218,11 +218,11 @@ public class yacyPeerActions {
// this is called only if the peer has junior status
seedDB.addPotential(peer);
// perform all actions
- processPeerArrival(peer, true);
+ processPeerArrival(peer);
RSSFeed.channels(RSSFeed.PEERNEWS).addMessage(new RSSMessage(peer.getName() + " sent me a ping", "", ""));
}
- private void processPeerArrival(final yacySeed peer, final boolean direct) {
+ private void processPeerArrival(final yacySeed peer) {
final String recordString = peer.get("news", null);
//System.out.println("### triggered news arrival from peer " + peer.getName() + ", news " + ((recordString == null) ? "empty" : "attached"));
if ((recordString == null) || (recordString.length() == 0)) return;

@@ -328,7 +328,7 @@ public final class yacyRelease extends yacyVersion {
if ((!download.exists()) || (download.length() == 0)) throw new IOException("wget of url " + this.getUrl() + " failed");
} catch (final IOException e) {
// Saving file failed, abort download
- res.abort();
+ if (res != null) res.abort();
Log.logSevere("yacyVersion", "download of " + this.getName() + " failed: " + e.getMessage());
if (download != null && download.exists()) {
FileUtils.deletedelete(download);

@@ -302,12 +302,6 @@ public final class yacySeedDB implements httpdAlternativeDomainNames {
newsPool.close();
peerActions.close();
}
- @SuppressWarnings("unchecked")
- public void initializeHandler(final String mapname, final Map map) {
- // this is used to set up a lastSeen lookup table
- }
public Iterator<yacySeed> seedsSortedConnected(final boolean up, final String field) {
// enumerates seed-type objects: all seeds sequentially ordered by field

@@ -82,7 +82,7 @@ public class ymageOSM {
// download resource using the crawler and keep resource in memory if possible
Document entry = null;
try {
- entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(tileURL, 20000, true, false, false);
+ entry = plasmaSwitchboard.getSwitchboard().crawlQueues.loadResourceFromWeb(tileURL, false, false);
} catch (IOException e) {
Log.logWarning("yamyOSM", "cannot load: " + e.getMessage());
return null;
