diff --git a/htroot/CrawlCheck_p.java b/htroot/CrawlCheck_p.java
index 3af63de8c..d66ccb8e2 100644
--- a/htroot/CrawlCheck_p.java
+++ b/htroot/CrawlCheck_p.java
@@ -94,7 +94,7 @@ public class CrawlCheck_p {
                     robotsAllowed = !entry.robotsTxtEntry.isDisallowed(entry.digestURL);
                     prop.put("table_list_" + row + "_robots", "robots exist: " + (robotsAllowed ? "crawl allowed" : "url disallowed"));
                     prop.put("table_list_" + row + "_crawldelay", Math.max(agent.minimumDelta, entry.robotsTxtEntry.getCrawlDelayMillis()) + " ms");
-                    prop.put("table_list_" + row + "_sitemap", entry.robotsTxtEntry.getSitemap() == null ? "-" : entry.robotsTxtEntry.getSitemap().toNormalform(true));
+                    prop.put("table_list_" + row + "_sitemap", entry.robotsTxtEntry.getSitemaps().toString());
                 }
 
                 // try to load the url
diff --git a/htroot/api/getpageinfo.java b/htroot/api/getpageinfo.java
index eb6cd052b..c511c5e75 100644
--- a/htroot/api/getpageinfo.java
+++ b/htroot/api/getpageinfo.java
@@ -26,7 +26,9 @@
 
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.Set;
 
 import javax.xml.parsers.DocumentBuilder;
@@ -35,7 +37,6 @@ import javax.xml.parsers.ParserConfigurationException;
 
 import net.yacy.cora.document.id.AnchorURL;
 import net.yacy.cora.document.id.DigestURL;
-import net.yacy.cora.document.id.MultiProtocolURL;
 import net.yacy.cora.federate.yacy.CacheStrategy;
 import net.yacy.cora.protocol.ClientIdentification;
 import net.yacy.cora.protocol.RequestHeader;
@@ -157,8 +158,11 @@ public class getpageinfo {
                 prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
 
                 // get the sitemap URL of the domain
-                final MultiProtocolURL sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
-                prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
+                final List<String> sitemaps = robotsEntry == null ? new ArrayList<String>(0) : robotsEntry.getSitemaps();
+                for (int i = 0; i < sitemaps.size(); i++) {
+                    prop.putXML("sitemaps_" + i + "_sitemap", sitemaps.get(i));
+                }
+                prop.put("sitemaps", sitemaps.size());
             } catch (final MalformedURLException e) {
                 ConcurrentLog.logException(e);
             }
diff --git a/htroot/api/getpageinfo.xml b/htroot/api/getpageinfo.xml
index 664e1972c..0758b2333 100644
--- a/htroot/api/getpageinfo.xml
+++ b/htroot/api/getpageinfo.xml
@@ -5,7 +5,9 @@
   #[lang]#
   #(robots-allowed)#0::1::#(/robots-allowed)#
   #[robotsInfo]#
+  #{sitemaps}#
   #[sitemap]#
+  #{/sitemaps}#
   #[favicon]#
   #[sitelist]#
   #[filter]#
diff --git a/htroot/api/getpageinfo_p.java b/htroot/api/getpageinfo_p.java
index 6981d9397..f280f87a0 100644
--- a/htroot/api/getpageinfo_p.java
+++ b/htroot/api/getpageinfo_p.java
@@ -26,7 +26,9 @@
 
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.Set;
 
 import javax.xml.parsers.DocumentBuilder;
@@ -35,7 +37,6 @@ import javax.xml.parsers.ParserConfigurationException;
 
 import net.yacy.cora.document.id.AnchorURL;
 import net.yacy.cora.document.id.DigestURL;
-import net.yacy.cora.document.id.MultiProtocolURL;
 import net.yacy.cora.federate.yacy.CacheStrategy;
 import net.yacy.cora.protocol.ClientIdentification;
 import net.yacy.cora.protocol.RequestHeader;
@@ -158,8 +159,11 @@ public class getpageinfo_p {
                 prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
 
                 // get the sitemap URL of the domain
-                final MultiProtocolURL sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
-                prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
+                final List<String> sitemaps = robotsEntry == null ? new ArrayList<String>(0) : robotsEntry.getSitemaps();
+                for (int i = 0; i < sitemaps.size(); i++) {
+                    prop.putXML("sitemaps_" + i + "_sitemap", sitemaps.get(i));
+                }
+                prop.put("sitemaps", sitemaps.size());
             } catch (final MalformedURLException e) {
                 ConcurrentLog.logException(e);
             }
diff --git a/htroot/api/getpageinfo_p.xml b/htroot/api/getpageinfo_p.xml
index 664e1972c..0758b2333 100644
--- a/htroot/api/getpageinfo_p.xml
+++ b/htroot/api/getpageinfo_p.xml
@@ -5,7 +5,9 @@
   #[lang]#
   #(robots-allowed)#0::1::#(/robots-allowed)#
   #[robotsInfo]#
+  #{sitemaps}#
   #[sitemap]#
+  #{/sitemaps}#
   #[favicon]#
   #[sitelist]#
   #[filter]#
diff --git a/source/net/yacy/crawler/robots/RobotsTxtEntry.java b/source/net/yacy/crawler/robots/RobotsTxtEntry.java
index 293f9b127..c71ebfd2b 100644
--- a/source/net/yacy/crawler/robots/RobotsTxtEntry.java
+++ b/source/net/yacy/crawler/robots/RobotsTxtEntry.java
@@ -28,7 +28,6 @@
 
 package net.yacy.crawler.robots;
 
-import java.net.MalformedURLException;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedHashMap;
@@ -47,17 +46,17 @@ public class RobotsTxtEntry {
 
     private static final String HOST_NAME = "hostname";
     private static final String ALLOW_PATH_LIST = "allow";
     private static final String DISALLOW_PATH_LIST = "disallow";
+    private static final String SITEMAP_LIST = "sitemap";
     private static final String LOADED_DATE = "date";
     private static final String MOD_DATE = "modDate";
     private static final String ETAG = "etag";
-    private static final String SITEMAP = "sitemap";
     private static final String CRAWL_DELAY = "crawlDelay";
     private static final String CRAWL_DELAY_MILLIS = "crawlDelayMillis";
     private static final String AGENT_NAME = "agentname";
 
     // this is a simple record structure that holds all properties of a single crawl start
     private final Map<String, byte[]> mem;
-    private final List<String> allowPathList, denyPathList;
+    private final List<String> allowPathList, denyPathList, sitemapList;
     private final String hostName, agentName;
     private String info; // this is filled if robots disallowed access; then the reason is noted there;
@@ -66,33 +65,27 @@ public class RobotsTxtEntry {
         this.mem = mem;
         this.info = "";
 
-        if (this.mem.containsKey(DISALLOW_PATH_LIST)) {
-            this.denyPathList = new LinkedList<String>();
-            final String csPl = UTF8.String(this.mem.get(DISALLOW_PATH_LIST));
-            if (csPl.length() > 0){
-                final String[] pathArray = csPl.split(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-                if ((pathArray != null)&&(pathArray.length > 0)) {
-                    this.denyPathList.addAll(Arrays.asList(pathArray));
-                }
-            }
-        } else {
-            this.denyPathList = new LinkedList<String>();
-        }
-        if (this.mem.containsKey(ALLOW_PATH_LIST)) {
-            this.allowPathList = new LinkedList<String>();
-            final String csPl = UTF8.String(this.mem.get(ALLOW_PATH_LIST));
+        this.denyPathList = new LinkedList<String>();
+        fillMultiValue(this.denyPathList, DISALLOW_PATH_LIST);
+        this.allowPathList = new LinkedList<String>();
+        fillMultiValue(this.allowPathList, ALLOW_PATH_LIST);
+        this.sitemapList = new LinkedList<String>();
+        fillMultiValue(this.sitemapList, SITEMAP_LIST);
+        this.agentName = this.mem.containsKey(AGENT_NAME) ? UTF8.String(this.mem.get(AGENT_NAME)) : null;
+    }
+
+    private void fillMultiValue(List<String> list, String listName) {
+        if (this.mem.containsKey(listName)) {
+            final String csPl = UTF8.String(this.mem.get(listName));
             if (csPl.length() > 0){
                 final String[] pathArray = csPl.split(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-                if ((pathArray != null)&&(pathArray.length > 0)) {
-                    this.allowPathList.addAll(Arrays.asList(pathArray));
+                if ((pathArray != null) && (pathArray.length > 0)) {
+                    list.addAll(Arrays.asList(pathArray));
                 }
             }
-        } else {
-            this.allowPathList = new LinkedList<String>();
         }
-        this.agentName = this.mem.containsKey(AGENT_NAME) ? UTF8.String(this.mem.get(AGENT_NAME)) : null;
     }
-    
+
     protected RobotsTxtEntry(
             final MultiProtocolURL theURL,
             final List<String> allowPathList,
@@ -100,7 +93,7 @@
             final Date loadedDate,
             final Date modDate,
             final String eTag,
-            final String sitemap,
+            final List<String> sitemapList,
             final long crawlDelayMillis,
             final String agentName
             ) {
@@ -109,6 +102,7 @@
         this.hostName = RobotsTxt.getHostPort(theURL).toLowerCase();
         this.allowPathList = new LinkedList<String>();
         this.denyPathList = new LinkedList<String>();
+        this.sitemapList = new LinkedList<String>();
         this.agentName = agentName;
 
         this.mem = new LinkedHashMap<String, byte[]>(10);
@@ -116,33 +110,26 @@
         if (loadedDate != null) this.mem.put(LOADED_DATE, ASCII.getBytes(Long.toString(loadedDate.getTime())));
         if (modDate != null) this.mem.put(MOD_DATE, ASCII.getBytes(Long.toString(modDate.getTime())));
         if (eTag != null) this.mem.put(ETAG, UTF8.getBytes(eTag));
-        if (sitemap != null) this.mem.put(SITEMAP, UTF8.getBytes(sitemap));
         if (crawlDelayMillis > 0) this.mem.put(CRAWL_DELAY_MILLIS, ASCII.getBytes(Long.toString(crawlDelayMillis)));
         if (agentName != null) this.mem.put(AGENT_NAME, UTF8.getBytes(agentName));
 
-        if (allowPathList != null && !allowPathList.isEmpty()) {
-            this.allowPathList.addAll(allowPathList);
-
-            final StringBuilder pathListStr = new StringBuilder(allowPathList.size() * 30);
-            for (final String element : allowPathList) {
-                pathListStr.append(element)
-                        .append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-            }
-            this.mem.put(ALLOW_PATH_LIST, UTF8.getBytes(pathListStr.substring(0,pathListStr.length()-1)));
-        }
+        readMultiValue(allowPathList, this.allowPathList, ALLOW_PATH_LIST);
+        readMultiValue(disallowPathList, this.denyPathList, DISALLOW_PATH_LIST);
+        readMultiValue(sitemapList, this.sitemapList, SITEMAP_LIST);
+    }
 
-        if (disallowPathList != null && !disallowPathList.isEmpty()) {
-            this.denyPathList.addAll(disallowPathList);
+    private void readMultiValue(List<String> externallist, List<String> internallist, String listName) {
+        if (externallist != null && !externallist.isEmpty()) {
+            internallist.addAll(externallist);
 
-            final StringBuilder pathListStr = new StringBuilder(disallowPathList.size() * 30);
-            for (final String element : disallowPathList) {
-                pathListStr.append(element)
-                        .append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
+            final StringBuilder pathListStr = new StringBuilder(externallist.size() * 30);
+            for (final String element : externallist) {
+                pathListStr.append(element).append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
             }
-            this.mem.put(DISALLOW_PATH_LIST, UTF8.getBytes(pathListStr.substring(0, pathListStr.length()-1)));
+            this.mem.put(listName, UTF8.getBytes(pathListStr.substring(0, pathListStr.length()-1)));
         }
     }
-    
+
     protected String getHostName() {
         return this.hostName;
     }
@@ -174,17 +161,11 @@
     }
 
     /**
-     * get the sitemap url
-     * @return the sitemap url or null if no sitemap url is given
+     * get the sitemap urls
+     * @return a list of sitemap urls (possibly empty), but not null
      */
-    public MultiProtocolURL getSitemap() {
-        final String url = this.mem.containsKey(SITEMAP)? UTF8.String(this.mem.get(SITEMAP)): null;
-        if (url == null) return null;
-        try {
-            return new MultiProtocolURL(url);
-        } catch (final MalformedURLException e) {
-            return null;
-        }
+    public List<String> getSitemaps() {
+        return this.sitemapList;
     }
 
     protected Date getLoadedDate() {
diff --git a/source/net/yacy/crawler/robots/RobotsTxtParser.java b/source/net/yacy/crawler/robots/RobotsTxtParser.java
index a5e54939e..bc0d767c9 100644
--- a/source/net/yacy/crawler/robots/RobotsTxtParser.java
+++ b/source/net/yacy/crawler/robots/RobotsTxtParser.java
@@ -72,7 +72,7 @@
 
     private final ArrayList<String> allowList;
     private final ArrayList<String> denyList;
-    private String sitemap;
+    private ArrayList<String> sitemaps;
    private long crawlDelayMillis;
     private final String[] myNames; // a list of own name lists
     private String agentName; // the name of the agent that was used to return the result
@@ -80,7 +80,7 @@
     protected RobotsTxtParser(final String[] myNames) {
         this.allowList = new ArrayList<String>(0);
         this.denyList = new ArrayList<String>(0);
-        this.sitemap = "";
+        this.sitemaps = new ArrayList<String>(0);
         this.crawlDelayMillis = 0;
         this.myNames = myNames;
         this.agentName = null;
@@ -128,10 +128,10 @@
 
             // parse sitemap; if there are several sitemaps then take the first url
             // TODO: support for multiple sitemaps
-            if (lineUpper.startsWith(ROBOTS_SITEMAP) && (this.sitemap == null || this.sitemap.isEmpty())) {
+            if (lineUpper.startsWith(ROBOTS_SITEMAP)) {
                 pos = line.indexOf(' ');
                 if (pos != -1) {
-                    this.sitemap = line.substring(pos).trim();
+                    this.sitemaps.add(line.substring(pos).trim());
                 }
                 continue lineparser;
             }
@@ -258,8 +258,8 @@
         return this.agentName;
     }
 
-    protected String sitemap() {
-        return this.sitemap;
+    protected ArrayList<String> sitemap() {
+        return this.sitemaps;
     }
 
     protected ArrayList<String> allowList() {
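Note on consuming the changed API: getSitemaps() returns the raw sitemap URL strings from robots.txt and no longer parses them into MultiProtocolURL objects, so URL validation moves to the caller. A minimal illustrative sketch (not part of this changeset; the class and method names below are hypothetical) of how a caller could rebuild the old per-entry behaviour:

import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.List;

import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.crawler.robots.RobotsTxtEntry;

// Hypothetical example, not part of the patch: turn the sitemap strings from
// getSitemaps() back into parsed URLs, skipping malformed entries just as the
// removed getSitemap() did by swallowing MalformedURLException.
public class SitemapListExample {

    public static List<MultiProtocolURL> parseSitemaps(final RobotsTxtEntry robotsEntry) {
        final List<MultiProtocolURL> result = new ArrayList<MultiProtocolURL>();
        if (robotsEntry == null) return result;
        for (final String sitemap : robotsEntry.getSitemaps()) {
            try {
                result.add(new MultiProtocolURL(sitemap));
            } catch (final MalformedURLException e) {
                // ignore invalid sitemap declarations in robots.txt
            }
        }
        return result;
    }
}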