removed more unused method parameters

pull/1/head
Michael Peter Christen 13 years ago
parent 83701a1b4c
commit 7c1ba99755

@@ -196,7 +196,7 @@ public class Bookmarks {
// try to get the bookmark from the LURL database
final URIMetadataRow urlentry = sb.index.urlMetadata().load(ASCII.getBytes(urlHash));
if (urlentry != null) try {
final Document document = Document.mergeDocuments(urlentry.url(), null, sb.loader.loadDocuments(sb.loader.request(urlentry.url(), true, false), CacheStrategy.IFEXIST, 5000, Integer.MAX_VALUE, null));
final Document document = Document.mergeDocuments(urlentry.url(), null, sb.loader.loadDocuments(sb.loader.request(urlentry.url(), true, false), CacheStrategy.IFEXIST, Integer.MAX_VALUE, null));
prop.put("mode_edit", "0"); // create mode
prop.put("mode_url", urlentry.url().toNormalform(false, true));
prop.putHTML("mode_title", urlentry.dc_title());

@@ -492,7 +492,7 @@ public class Crawler_p {
cachePolicy);
sb.crawler.putActive(profile.handle().getBytes(), profile);
sb.pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
sb.crawlStacker.enqueueEntriesAsynchronous(sb.peers.mySeed().hash.getBytes(), profile.handle(), hyperlinks, true);
sb.crawlStacker.enqueueEntriesAsynchronous(sb.peers.mySeed().hash.getBytes(), profile.handle(), hyperlinks);
} catch (final PatternSyntaxException e) {
prop.put("info", "4"); // crawlfilter does not match url
prop.putHTML("info_newcrawlingfilter", newcrawlingMustMatch);

@@ -28,7 +28,7 @@ import de.anomic.server.serverSwitch;
public class PerformanceConcurrency_p {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, final serverSwitch sb) {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, @SuppressWarnings("unused") final serverSwitch sb) {
// return variable that accumulates replacements
final serverObjects prop = new serverObjects();

@@ -35,7 +35,7 @@ import de.anomic.server.serverSwitch;
public class PerformanceSearch_p {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, final serverSwitch sb) {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, @SuppressWarnings("unused") final serverSwitch sb) {
// return variable that accumulates replacements
final serverObjects prop = new serverObjects();

@@ -120,13 +120,9 @@ public class Table {
}
if (post.containsKey("load")) {
o = Interaction.GetTableentry(s, p, global ? "global" : username, "");
o = Interaction.GetTableentry(s, p, global ? "global" : username);
} else {
Interaction.Tableentry(s, p, o, global ? "global" : username, "");
}
prop.put("result", o);

@@ -667,7 +667,6 @@ public class yacysearch {
sb.loader.loadDocuments(
sb.loader.request(urlentry.url(), true, false),
CacheStrategy.IFEXIST,
5000,
Integer.MAX_VALUE, BlacklistType.SEARCH);
} catch ( final IOException e ) {
} catch ( final Parser.Failure e ) {

@@ -181,7 +181,7 @@ public final class CrawlStacker {
}
}
}
public void enqueueEntriesAsynchronous(final byte[] initiator, final String profileHandle, final Map<MultiProtocolURI, Properties> hyperlinks, final boolean replace) {
public void enqueueEntriesAsynchronous(final byte[] initiator, final String profileHandle, final Map<MultiProtocolURI, Properties> hyperlinks) {
new Thread() {
@Override
public void run() {

@@ -172,7 +172,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
return (serverClient.equals("*")) ? true : match(session.userAddress.getHostAddress(), serverClient);
}
private static boolean allowYaCyHop(final Session session) {
private static boolean allowYaCyHop() {
return switchboard.getConfigBool("YaCyHop", false);
}
@@ -256,9 +256,9 @@ public final class HTTPDemon implements serverHandler, Cloneable {
return persistent;
}
private boolean handleYaCyHopAuthentication(final RequestHeader header, final HashMap<String, Object> prop, final Session session) {
private boolean handleYaCyHopAuthentication(final RequestHeader header, final HashMap<String, Object> prop) {
// check if the user has allowed that his/her peer is used for hops
if (!allowYaCyHop(session)) return false;
if (!allowYaCyHop()) return false;
// proxy hops must identify with 4 criteria:
@@ -408,7 +408,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
}
} else {
// pass to proxy
if (((allowYaCyHop(session)) && (handleYaCyHopAuthentication(header, prop, session))) ||
if (((allowYaCyHop()) && (handleYaCyHopAuthentication(header, prop))) ||
((allowProxy(session)) && (handleProxyAuthentication(header, prop, session)))) {
HTTPDProxyHandler.doGet(prop, header, session.out);
} else {
@@ -476,7 +476,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
}
} else {
// pass to proxy
if (((allowYaCyHop(session)) && (handleYaCyHopAuthentication(header, prop, session))) ||
if (((allowYaCyHop()) && (handleYaCyHopAuthentication(header, prop))) ||
((allowProxy(session)) && (handleProxyAuthentication(header, prop, session)))) {
HTTPDProxyHandler.doHead(prop, header, session.out);
} else {
@@ -542,7 +542,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
}
} else {
// pass to proxy
if (((allowYaCyHop(session)) && (handleYaCyHopAuthentication(header, prop, session))) ||
if (((allowYaCyHop()) && (handleYaCyHopAuthentication(header, prop))) ||
((allowProxy(session)) && (handleProxyAuthentication(header, prop, session)))) {
HTTPDProxyHandler.doPost(prop, header, session.out, sessionIn);
} else {
@@ -631,7 +631,7 @@ public final class HTTPDemon implements serverHandler, Cloneable {
}
// pass to proxy
if (((allowYaCyHop(session)) && (handleYaCyHopAuthentication(header, prop, session))) ||
if (((allowYaCyHop()) && (handleYaCyHopAuthentication(header, prop))) ||
((allowProxy(session)) && (handleProxyAuthentication(header, prop, session)))) {
HTTPDProxyHandler.doConnect(prop, header, session.in, session.out);
} else {
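Why the Session argument could go is visible in the first HTTPDemon hunk above: allowYaCyHop() only reads the YaCyHop configuration flag and never touches the session. All four proxy branches (doGet, doHead, doPost, doConnect) now share the pattern sketched below; header, prop and session are the locals of the surrounding methods:

// illustrative sketch, not part of this commit: the hop check is session-free,
// only the plain proxy path still needs the session for its authentication
if ((allowYaCyHop() && handleYaCyHopAuthentication(header, prop)) ||
        (allowProxy(session) && handleProxyAuthentication(header, prop, session))) {
    HTTPDProxyHandler.doGet(prop, header, session.out); // doHead/doPost/doConnect in the other call sites
} else {
    // neither YaCy hop nor proxy access is granted for this request
}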

@@ -11,6 +11,7 @@ import net.yacy.document.Document;
import net.yacy.document.Parser;
import net.yacy.document.parser.rdfa.impl.RDFaParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import de.anomic.data.ymark.YMarkUtil;
@@ -23,7 +24,7 @@ public class AugmentParser extends AbstractParser implements Parser {
super("AugmentParser");
this.rdfaParser = new RDFaParser();
System.out.println("augmented parser was initialized");
Log.logInfo("AugmentedParser", "augmented parser was initialized");
this.SUPPORTED_EXTENSIONS.add("html");
this.SUPPORTED_EXTENSIONS.add("php");
@@ -34,27 +35,18 @@ public class AugmentParser extends AbstractParser implements Parser {
}
@Override
public Document[] parse(DigestURI url, String mimeType,
String charset, InputStream source) throws Failure,
InterruptedException {
public Document[] parse(DigestURI url, String mimeType, String charset, InputStream source) throws Failure, InterruptedException {
Document[] htmlDocs = this.rdfaParser.parse(url, mimeType, charset, source);
try {
source.reset();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
Log.logException(e);
}
Document alreadyParsedDocument = htmlDocs[0];
Document superDoc = analyze(alreadyParsedDocument, url, mimeType, charset, source);
Document augmentDoc = parseAndAugment(url, mimeType, charset, source);
Document superDoc = analyze(alreadyParsedDocument, url, mimeType, charset);
Document augmentDoc = parseAndAugment(url, mimeType, charset);
Document[] retDocs = new Document[htmlDocs.length + 2];
for (int i = 0; i < htmlDocs.length; i++) {
retDocs[i] = htmlDocs[i];
@@ -62,23 +54,18 @@ public class AugmentParser extends AbstractParser implements Parser {
retDocs[retDocs.length - 1] = augmentDoc;
retDocs[retDocs.length - 2] = superDoc;
return retDocs;
}
private Document analyze (Document alreadyParsedDocument, DigestURI url,
String mimeType, String charset, InputStream source) {
String mimeType, String charset) {
Document newDoc = new Document(url, mimeType, charset, null, null, null, "", "",
"", null, "", 0, 0, null, null, null, null, false);
// if the magic word appears in the document, perform extra actions.
if (alreadyParsedDocument.getKeywords().contains("magicword")) {
String all = "";
all = "yacylatest";
newDoc = new Document(url, mimeType, charset, null, null, null, "", "",
"", null, "", 0, 0, all, null, null, null, false);
@@ -87,48 +74,31 @@ public class AugmentParser extends AbstractParser implements Parser {
return newDoc;
}
private Document parseAndAugment(DigestURI url,
String mimeType, String charset, InputStream source) {
private Document parseAndAugment(DigestURI url, String mimeType, String charset) {
String all = "";
Document newDoc = new Document(url, mimeType, charset, null, null, null, "", "",
"", null, "", 0, 0, all, null, null, null, false);
Iterator<net.yacy.kelondro.blob.Tables.Row> it;
try {
it = Switchboard.getSwitchboard().tables.iterator("aggregatedtags");
it = Switchboard.getSwitchboard().tables.orderBy(it, -1, "timestamp_creation").iterator();
while (it.hasNext()) {
net.yacy.kelondro.blob.Tables.Row r = it.next();
if (r.get("url", "").equals (url.toNormalform(false, false))) {
Set<String> tags = new HashSet<String>();
for (String s : YMarkUtil.keysStringToSet(r.get("scitag", ""))) {
tags.add(s);
}
newDoc.addTags(tags);
}
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
Log.logException(e);
}
return newDoc;
}
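Besides the removed InputStream parameters, this file also moves its diagnostics from stdout and printStackTrace() to the YaCy Log class imported above. A small self-contained sketch of that logging pattern, assuming only net.yacy.kelondro.logging.Log from the repository; the class and method names below are made up for illustration:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

import net.yacy.kelondro.logging.Log;

// illustrative sketch, not part of this commit
public class LogFacilityExample {

    // reset a stream and report failures through the central log instead of printStackTrace()
    static void resetQuietly(final InputStream source) {
        try {
            source.reset();
        } catch (final IOException e) {
            Log.logException(e); // records the exception with its stack trace
        }
    }

    public static void main(final String[] args) {
        // category plus message, mirroring the constructor hunk above
        Log.logInfo("AugmentedParser", "augmented parser was initialized");
        resetQuietly(new ByteArrayInputStream(new byte[0]));
    }
}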

@@ -124,7 +124,7 @@ public class Interaction {
public static String GetTableentry(String url, String type, String username, String peer) {
public static String GetTableentry(String url, String type, String username) {
final Switchboard sb = Switchboard.getSwitchboard();

@@ -81,7 +81,6 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
final int posinphrase, // position of word in its phrase
final int posofphrase, // number of the phrase where word appears
final long lastmodified, // last-modified time of the document where word appears
final long updatetime, // update time; this is needed to compute a TTL for the word, so it can be removed easily if the TTL is short
byte[] language, // (guessed) language of document
final char doctype, // type of document
final int outlinksSame, // outlinks to same domain
@@ -174,7 +173,6 @@ public class WordReferenceVars extends AbstractReference implements WordReferenc
this.posinphrase,
this.posofphrase,
this.lastModified,
System.currentTimeMillis(),
this.language,
this.type,
this.llocal,

@@ -201,7 +201,7 @@ public final class IndexCell<ReferenceType extends Reference> extends AbstractBu
// rewrite old files (hack from sixcooler, see http://forum.yacy-websuche.de/viewtopic.php?p=15004#p15004)
term = 10;
while (term-- > 0 && (this.merger.queueLength() < 1)) {
if (!this.array.shrinkOldFiles(this.merger, targetFileSize)) break;
if (!this.array.shrinkOldFiles(this.merger)) break;
donesomething = true;
}

@@ -435,7 +435,7 @@ public final class ReferenceContainerArray<ReferenceType extends Reference> {
return true;
}
public boolean shrinkOldFiles(final IODispatcher merger, final long targetFileSize) {
public boolean shrinkOldFiles(final IODispatcher merger) {
final File ff = this.array.unmountOldest();
if (ff == null) return false;
Log.logInfo("RICELL-shrink4/rewrite", "unmountOldest()");

@@ -334,7 +334,7 @@ public final class LoaderDispatcher {
return entry.getContent();
}
public Document[] loadDocuments(final Request request, final CacheStrategy cacheStrategy, final int timeout, final int maxFileSize, BlacklistType blacklistType) throws IOException, Parser.Failure {
public Document[] loadDocuments(final Request request, final CacheStrategy cacheStrategy, final int maxFileSize, BlacklistType blacklistType) throws IOException, Parser.Failure {
// load resource
final Response response = load(request, cacheStrategy, maxFileSize, blacklistType);
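This is the declaration behind the Bookmarks, yacysearch, Segment and MediaSnippet hunks, so a consolidated call-site sketch may help; loader, url and the types involved (Document, CacheStrategy, BlacklistType) are assumed to be those already imported by the callers shown in this diff:

// illustrative sketch, not part of this commit
// before: loader.loadDocuments(request, CacheStrategy.IFEXIST, 5000, Integer.MAX_VALUE, null)
// after: the fixed timeout argument is removed; the remaining arguments keep their meaning
final Document[] docs = loader.loadDocuments(
        loader.request(url, true, false), // build the Request for this DigestURI
        CacheStrategy.IFEXIST,            // cacheStrategy: reuse a cached copy if one exists
        Integer.MAX_VALUE,                // maxFileSize: effectively unlimited
        null);                            // blacklistType: no blacklist check at this call site
final Document document = Document.mergeDocuments(url, null, docs);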

@@ -538,7 +538,7 @@ public class Segment {
try {
// parse the resource
final Document document = Document.mergeDocuments(entry.url(), null, loader.loadDocuments(loader.request(entry.url(), true, false), cacheStrategy, 10000, Integer.MAX_VALUE, null));
final Document document = Document.mergeDocuments(entry.url(), null, loader.loadDocuments(loader.request(entry.url(), true, false), cacheStrategy, Integer.MAX_VALUE, null));
if (document == null) {
// delete just the url entry
urlMetadata().remove(urlhash);

@@ -134,7 +134,7 @@ public class MediaSnippet implements Comparable<MediaSnippet>, Comparator<MediaS
return o1.compareTo(o2);
}
public static List<MediaSnippet> retrieveMediaSnippets(final DigestURI url, final HandleSet queryhashes, final Classification.ContentDomain mediatype, final CacheStrategy cacheStrategy, final int timeout, final boolean reindexing) {
public static List<MediaSnippet> retrieveMediaSnippets(final DigestURI url, final HandleSet queryhashes, final Classification.ContentDomain mediatype, final CacheStrategy cacheStrategy, final boolean reindexing) {
if (queryhashes.isEmpty()) {
Log.logFine("snippet fetch", "no query hashes given for url " + url);
return new ArrayList<MediaSnippet>();
@@ -142,7 +142,7 @@ public class MediaSnippet implements Comparable<MediaSnippet>, Comparator<MediaS
Document document;
try {
document = Document.mergeDocuments(url, null, Switchboard.getSwitchboard().loader.loadDocuments(Switchboard.getSwitchboard().loader.request(url, false, reindexing), cacheStrategy, timeout, Integer.MAX_VALUE, BlacklistType.SEARCH));
document = Document.mergeDocuments(url, null, Switchboard.getSwitchboard().loader.loadDocuments(Switchboard.getSwitchboard().loader.request(url, false, reindexing), cacheStrategy, Integer.MAX_VALUE, BlacklistType.SEARCH));
} catch (final IOException e) {
Log.logFine("snippet fetch", "load error: " + e.getMessage());
return new ArrayList<MediaSnippet>();
