refactoring: the cache belongs to the crawler

pull/1/head
Michael Peter Christen 13 years ago
parent 8429967ea7
commit 33d1062c79
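
A minimal, hypothetical sketch of what a caller looks like after this refactoring; only the static Cache.has(...) lookup is taken from the TextSnippet hunk below, while the DigestURI construction and the printed message are illustrative assumptions:

import net.yacy.kelondro.data.meta.DigestURI;
import de.anomic.crawler.Cache;   // previously: import de.anomic.http.client.Cache;

public class CacheLookupExample {
    public static void main(final String[] args) throws Exception {
        // DigestURI is YaCy's URL type; constructing it from a string is assumed here
        final DigestURI url = new DigestURI("http://example.org/");
        if (Cache.has(url)) {
            // the crawler has already fetched this resource and keeps a local copy
            System.out.println("found in crawler cache: " + url);
        }
    }
}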

@@ -31,7 +31,7 @@ import net.yacy.document.ImageParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
-import de.anomic.http.client.Cache;
+import de.anomic.crawler.Cache;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.server.servletProperties;

@@ -33,8 +33,8 @@ import net.yacy.cora.protocol.RequestHeader;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
+import de.anomic.crawler.Cache;
import de.anomic.data.WorkTables;
-import de.anomic.http.client.Cache;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

@@ -68,10 +68,10 @@ import net.yacy.search.query.RWIProcess;
import net.yacy.search.query.SearchEventCache;
import net.yacy.search.ranking.BlockRank;
import net.yacy.search.ranking.ReferenceOrder;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.ResultURLs;
import de.anomic.data.ListManager;
import de.anomic.data.WorkTables;
-import de.anomic.http.client.Cache;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

@@ -34,7 +34,7 @@ import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
-import de.anomic.http.client.Cache;
+import de.anomic.crawler.Cache;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

@@ -53,8 +53,8 @@ import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segment;
import net.yacy.search.index.Segments;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.retrieval.Response;
-import de.anomic.http.client.Cache;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;

@@ -53,7 +53,6 @@ import net.yacy.kelondro.order.Base64Order;
import net.yacy.kelondro.table.Table;
import net.yacy.kelondro.util.MemoryControl;
import de.anomic.crawler.retrieval.Request;
-import de.anomic.http.client.Cache;
public class Balancer {

@@ -32,7 +32,7 @@
the class shall also be used to do a cache-cleaning and index creation
*/
-package de.anomic.http.client;
+package de.anomic.crawler;
import java.io.File;
import java.io.IOException;

@@ -87,9 +87,9 @@ import net.yacy.kelondro.util.FileUtils;
import net.yacy.repository.Blacklist;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.retrieval.Request;
import de.anomic.crawler.retrieval.Response;
-import de.anomic.http.client.Cache;
import de.anomic.server.serverCore;
import de.anomic.server.serverObjects;

@@ -39,8 +39,8 @@ import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.visualization.RasterPlotter;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.retrieval.Response;
-import de.anomic.http.client.Cache;
public class OSMTile {

@@ -54,6 +54,7 @@ import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segments;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.CrawlProfile;
import de.anomic.crawler.ZURL.FailCategory;
import de.anomic.crawler.retrieval.FTPLoader;
@@ -62,7 +63,6 @@ import de.anomic.crawler.retrieval.HTTPLoader;
import de.anomic.crawler.retrieval.Request;
import de.anomic.crawler.retrieval.Response;
import de.anomic.crawler.retrieval.SMBLoader;
-import de.anomic.http.client.Cache;
public final class LoaderDispatcher {

@@ -150,6 +150,7 @@ import net.yacy.search.query.SearchEventCache;
import net.yacy.search.ranking.BlockRank;
import net.yacy.search.ranking.RankingProfile;
import net.yacy.search.snippet.ContentDomain;
+import de.anomic.crawler.Cache;
import de.anomic.crawler.CrawlProfile;
import de.anomic.crawler.CrawlQueues;
import de.anomic.crawler.CrawlStacker;
@@ -174,7 +175,6 @@ import de.anomic.data.WorkTables;
import de.anomic.data.wiki.WikiBoard;
import de.anomic.data.wiki.WikiCode;
import de.anomic.data.wiki.WikiParser;
-import de.anomic.http.client.Cache;
import de.anomic.http.server.RobotsTxtConfig;
import de.anomic.server.serverCore;
import de.anomic.server.serverSwitch;

@@ -60,8 +60,8 @@ import net.yacy.search.snippet.TextSnippet;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
+import de.anomic.crawler.Cache;
import de.anomic.data.WorkTables;
-import de.anomic.http.client.Cache;
public class SnippetProcess {

@@ -266,7 +266,7 @@ public class TextSnippet implements Comparable<TextSnippet>, Comparator<TextSnippet>
// first try to get the snippet from metadata
String loc;
final Request request = loader.request(url, true, reindexing);
-final boolean inCache = de.anomic.http.client.Cache.has(row.url());
+final boolean inCache = de.anomic.crawler.Cache.has(row.url());
final boolean noCacheUsage = url.isFile() || url.isSMB() || cacheStrategy == null;
if (containsAllHashes(loc = row.dc_title(), queryhashes) ||
containsAllHashes(loc = row.dc_creator(), queryhashes) ||
