support for multiple sitemaps in robots.txt

pull/1/head
Michael Peter Christen 11 years ago
parent a373fb717d
commit 85a427ec54
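Summary of the change: the robots.txt handling previously kept only the first Sitemap line. The parser now collects every Sitemap declaration into a list, RobotsTxtEntry stores and returns that list (getSitemap() becomes getSitemaps()), and the CrawlCheck_p and getpageinfo/getpageinfo_p servlets plus their XML templates are adapted to render multiple sitemap URLs.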

@@ -94,7 +94,7 @@ public class CrawlCheck_p {
                     robotsAllowed = !entry.robotsTxtEntry.isDisallowed(entry.digestURL);
                     prop.put("table_list_" + row + "_robots", "robots exist: " + (robotsAllowed ? "crawl allowed" : "url disallowed"));
                     prop.put("table_list_" + row + "_crawldelay", Math.max(agent.minimumDelta, entry.robotsTxtEntry.getCrawlDelayMillis()) + " ms");
-                    prop.put("table_list_" + row + "_sitemap", entry.robotsTxtEntry.getSitemap() == null ? "-" : entry.robotsTxtEntry.getSitemap().toNormalform(true));
+                    prop.put("table_list_" + row + "_sitemap", entry.robotsTxtEntry.getSitemaps().toString());
                 }
                 // try to load the url
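Note: getSitemaps() returns a List<String>, so the table cell now shows the list's toString() form, and an entry without sitemaps renders as [] instead of "-". A hypothetical illustration with two sitemap entries:

    before: https://example.org/sitemap.xml
    after:  [https://example.org/sitemap.xml, https://example.org/news-sitemap.xml]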

@@ -26,7 +26,9 @@
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.Set;
 import javax.xml.parsers.DocumentBuilder;
@@ -35,7 +37,6 @@ import javax.xml.parsers.ParserConfigurationException;
 import net.yacy.cora.document.id.AnchorURL;
 import net.yacy.cora.document.id.DigestURL;
-import net.yacy.cora.document.id.MultiProtocolURL;
 import net.yacy.cora.federate.yacy.CacheStrategy;
 import net.yacy.cora.protocol.ClientIdentification;
 import net.yacy.cora.protocol.RequestHeader;
@@ -157,8 +158,11 @@ public class getpageinfo {
             prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
             // get the sitemap URL of the domain
-            final MultiProtocolURL sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
-            prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
+            final List<String> sitemaps = robotsEntry == null ? new ArrayList<String>(0) : robotsEntry.getSitemaps();
+            for (int i = 0; i < sitemaps.size(); i++) {
+                prop.putXML("sitemaps_" + i + "_sitemap", sitemaps.get(i));
+            }
+            prop.put("sitemaps", sitemaps.size());
         } catch (final MalformedURLException e) {
             ConcurrentLog.logException(e);
         }
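For orientation (hypothetical values, not part of the diff): the servlet fills the template's #{sitemaps}# loop through numbered serverObjects keys. With two sitemap URLs the properties would look like:

    sitemaps = 2
    sitemaps_0_sitemap = https://example.org/sitemap.xml
    sitemaps_1_sitemap = https://example.org/news-sitemap.xml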

@@ -5,7 +5,9 @@
   <lang>#[lang]#</lang>
   <robots>#(robots-allowed)#0::1::#(/robots-allowed)#</robots>
   <robotsInfo>#[robotsInfo]#</robotsInfo>
+#{sitemaps}#
   <sitemap>#[sitemap]#</sitemap>
+#{/sitemaps}#
   <favicon>#[favicon]#</favicon>
   <sitelist>#[sitelist]#</sitelist>
   <filter>#[filter]#</filter>
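Rendered output for the hypothetical two-sitemap case above would then contain one <sitemap> element per entry:

    <sitemap>https://example.org/sitemap.xml</sitemap>
    <sitemap>https://example.org/news-sitemap.xml</sitemap>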

@@ -26,7 +26,9 @@
 import java.io.IOException;
 import java.net.MalformedURLException;
+import java.util.ArrayList;
 import java.util.Collection;
+import java.util.List;
 import java.util.Set;
 import javax.xml.parsers.DocumentBuilder;
@@ -35,7 +37,6 @@ import javax.xml.parsers.ParserConfigurationException;
 import net.yacy.cora.document.id.AnchorURL;
 import net.yacy.cora.document.id.DigestURL;
-import net.yacy.cora.document.id.MultiProtocolURL;
 import net.yacy.cora.federate.yacy.CacheStrategy;
 import net.yacy.cora.protocol.ClientIdentification;
 import net.yacy.cora.protocol.RequestHeader;
@@ -158,8 +159,11 @@ public class getpageinfo_p {
             prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
             // get the sitemap URL of the domain
-            final MultiProtocolURL sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
-            prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
+            final List<String> sitemaps = robotsEntry == null ? new ArrayList<String>(0) : robotsEntry.getSitemaps();
+            for (int i = 0; i < sitemaps.size(); i++) {
+                prop.putXML("sitemaps_" + i + "_sitemap", sitemaps.get(i));
+            }
+            prop.put("sitemaps", sitemaps.size());
         } catch (final MalformedURLException e) {
             ConcurrentLog.logException(e);
         }

@@ -5,7 +5,9 @@
   <lang>#[lang]#</lang>
   <robots>#(robots-allowed)#0::1::#(/robots-allowed)#</robots>
   <robotsInfo>#[robotsInfo]#</robotsInfo>
+#{sitemaps}#
   <sitemap>#[sitemap]#</sitemap>
+#{/sitemaps}#
   <favicon>#[favicon]#</favicon>
   <sitelist>#[sitelist]#</sitelist>
   <filter>#[filter]#</filter>
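The servlet and template changes appear twice because getpageinfo and its admin-protected twin getpageinfo_p share the same XML layout; both variants have to stay in sync.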

@@ -28,7 +28,6 @@
 package net.yacy.crawler.robots;
 
-import java.net.MalformedURLException;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.LinkedHashMap;
@@ -47,17 +46,17 @@ public class RobotsTxtEntry {
     private static final String HOST_NAME = "hostname";
     private static final String ALLOW_PATH_LIST = "allow";
     private static final String DISALLOW_PATH_LIST = "disallow";
+    private static final String SITEMAP_LIST = "sitemap";
     private static final String LOADED_DATE = "date";
     private static final String MOD_DATE = "modDate";
     private static final String ETAG = "etag";
-    private static final String SITEMAP = "sitemap";
     private static final String CRAWL_DELAY = "crawlDelay";
     private static final String CRAWL_DELAY_MILLIS = "crawlDelayMillis";
     private static final String AGENT_NAME = "agentname";
 
     // this is a simple record structure that holds all properties of a single crawl start
     private final Map<String, byte[]> mem;
-    private final List<String> allowPathList, denyPathList;
+    private final List<String> allowPathList, denyPathList, sitemapList;
     private final String hostName, agentName;
     private String info; // this is filled if robots disallowed access; then the reason is noted there;
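Worth noting: the renamed constant SITEMAP_LIST keeps the old storage key "sitemap", so records already persisted in the robots DB stay readable; a previously stored single sitemap URL is simply loaded as a one-element list.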
@@ -66,31 +65,25 @@ public class RobotsTxtEntry {
         this.mem = mem;
         this.info = "";
-        if (this.mem.containsKey(DISALLOW_PATH_LIST)) {
-            this.denyPathList = new LinkedList<String>();
-            final String csPl = UTF8.String(this.mem.get(DISALLOW_PATH_LIST));
-            if (csPl.length() > 0){
-                final String[] pathArray = csPl.split(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-                if ((pathArray != null)&&(pathArray.length > 0)) {
-                    this.denyPathList.addAll(Arrays.asList(pathArray));
-                }
-            }
-        } else {
-            this.denyPathList = new LinkedList<String>();
-        }
-        if (this.mem.containsKey(ALLOW_PATH_LIST)) {
-            this.allowPathList = new LinkedList<String>();
-            final String csPl = UTF8.String(this.mem.get(ALLOW_PATH_LIST));
-            if (csPl.length() > 0){
-                final String[] pathArray = csPl.split(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-                if ((pathArray != null)&&(pathArray.length > 0)) {
-                    this.allowPathList.addAll(Arrays.asList(pathArray));
-                }
-            }
-        } else {
-            this.allowPathList = new LinkedList<String>();
-        }
+        this.denyPathList = new LinkedList<String>();
+        fillMultiValue(this.denyPathList, DISALLOW_PATH_LIST);
+        this.allowPathList = new LinkedList<String>();
+        fillMultiValue(this.allowPathList, ALLOW_PATH_LIST);
+        this.sitemapList = new LinkedList<String>();
+        fillMultiValue(this.sitemapList, SITEMAP_LIST);
         this.agentName = this.mem.containsKey(AGENT_NAME) ? UTF8.String(this.mem.get(AGENT_NAME)) : null;
     }
+
+    private void fillMultiValue(List<String> list, String listName) {
+        if (this.mem.containsKey(listName)) {
+            final String csPl = UTF8.String(this.mem.get(listName));
+            if (csPl.length() > 0){
+                final String[] pathArray = csPl.split(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
+                if ((pathArray != null) && (pathArray.length > 0)) {
+                    list.addAll(Arrays.asList(pathArray));
+                }
+            }
+        }
+    }
 
     protected RobotsTxtEntry(
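The refactor collapses two copy-pasted parse blocks (which would have needed a third for sitemaps) into one fillMultiValue() helper that splits a separator-joined value out of the record; denyPathList, allowPathList and the new sitemapList are all loaded the same way.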
@@ -100,7 +93,7 @@ public class RobotsTxtEntry {
             final Date loadedDate,
             final Date modDate,
             final String eTag,
-            final String sitemap,
+            final List<String> sitemapList,
             final long crawlDelayMillis,
             final String agentName
@@ -109,6 +102,7 @@ public class RobotsTxtEntry {
         this.hostName = RobotsTxt.getHostPort(theURL).toLowerCase();
         this.allowPathList = new LinkedList<String>();
         this.denyPathList = new LinkedList<String>();
+        this.sitemapList = new LinkedList<String>();
         this.agentName = agentName;
         this.mem = new LinkedHashMap<String, byte[]>(10);
@@ -116,30 +110,23 @@ public class RobotsTxtEntry {
         if (loadedDate != null) this.mem.put(LOADED_DATE, ASCII.getBytes(Long.toString(loadedDate.getTime())));
         if (modDate != null) this.mem.put(MOD_DATE, ASCII.getBytes(Long.toString(modDate.getTime())));
         if (eTag != null) this.mem.put(ETAG, UTF8.getBytes(eTag));
-        if (sitemap != null) this.mem.put(SITEMAP, UTF8.getBytes(sitemap));
         if (crawlDelayMillis > 0) this.mem.put(CRAWL_DELAY_MILLIS, ASCII.getBytes(Long.toString(crawlDelayMillis)));
         if (agentName != null) this.mem.put(AGENT_NAME, UTF8.getBytes(agentName));
-        if (allowPathList != null && !allowPathList.isEmpty()) {
-            this.allowPathList.addAll(allowPathList);
-            final StringBuilder pathListStr = new StringBuilder(allowPathList.size() * 30);
-            for (final String element : allowPathList) {
-                pathListStr.append(element)
-                           .append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-            }
-            this.mem.put(ALLOW_PATH_LIST, UTF8.getBytes(pathListStr.substring(0,pathListStr.length()-1)));
-        }
-        if (disallowPathList != null && !disallowPathList.isEmpty()) {
-            this.denyPathList.addAll(disallowPathList);
-            final StringBuilder pathListStr = new StringBuilder(disallowPathList.size() * 30);
-            for (final String element : disallowPathList) {
-                pathListStr.append(element)
-                           .append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
-            }
-            this.mem.put(DISALLOW_PATH_LIST, UTF8.getBytes(pathListStr.substring(0, pathListStr.length()-1)));
-        }
+        readMultiValue(allowPathList, this.allowPathList, ALLOW_PATH_LIST);
+        readMultiValue(disallowPathList, this.denyPathList, DISALLOW_PATH_LIST);
+        readMultiValue(sitemapList, this.sitemapList, SITEMAP_LIST);
+    }
+
+    private void readMultiValue(List<String> externallist, List<String> internallist, String listName) {
+        if (externallist != null && !externallist.isEmpty()) {
+            internallist.addAll(externallist);
+            final StringBuilder pathListStr = new StringBuilder(externallist.size() * 30);
+            for (final String element : externallist) {
+                pathListStr.append(element).append(RobotsTxt.ROBOTS_DB_PATH_SEPARATOR);
+            }
+            this.mem.put(listName, UTF8.getBytes(pathListStr.substring(0, pathListStr.length()-1)));
+        }
     }
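To see what readMultiValue()/fillMultiValue() persist, here is a minimal standalone sketch of the join/split scheme; the separator is a hypothetical stand-in for RobotsTxt.ROBOTS_DB_PATH_SEPARATOR and the class is illustrative, not YaCy code:

    import java.util.Arrays;
    import java.util.List;

    public class MultiValueDemo {
        static final String SEP = ";"; // hypothetical; YaCy uses RobotsTxt.ROBOTS_DB_PATH_SEPARATOR

        public static void main(String[] args) {
            // write side (readMultiValue): join the values into one stored string
            List<String> sitemaps = Arrays.asList(
                    "https://example.org/sitemap.xml",
                    "https://example.org/news-sitemap.xml");
            String stored = String.join(SEP, sitemaps);

            // read side (fillMultiValue): split the stored string back into a list
            List<String> loaded = Arrays.asList(stored.split(SEP));
            System.out.println(loaded.equals(sitemaps)); // prints: true
        }
    }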
@@ -174,17 +161,11 @@ public class RobotsTxtEntry {
     }
 
     /**
-     * get the sitemap url
-     * @return the sitemap url or null if no sitemap url is given
+     * get the sitemap urls
+     * @return a list of sitemap urls (possibly empty), but not null
      */
-    public MultiProtocolURL getSitemap() {
-        final String url = this.mem.containsKey(SITEMAP)? UTF8.String(this.mem.get(SITEMAP)): null;
-        if (url == null) return null;
-        try {
-            return new MultiProtocolURL(url);
-        } catch (final MalformedURLException e) {
-            return null;
-        }
+    public List<String> getSitemaps() {
+        return this.sitemapList;
     }
 
     protected Date getLoadedDate() {
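A hedged usage sketch (not from the commit): the entries are now stored as raw strings, so URL validation moves to the caller, which iterates instead of null-checking a single MultiProtocolURL:

    // robotsEntry: a loaded RobotsTxtEntry; imports as in the classes above
    for (final String sitemap : robotsEntry.getSitemaps()) {
        try {
            final MultiProtocolURL url = new MultiProtocolURL(sitemap);
            // ... hand the sitemap URL to the loader
        } catch (final MalformedURLException e) {
            // malformed entries surface here now, not inside getSitemap()
        }
    }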

@@ -72,7 +72,7 @@ public final class RobotsTxtParser {
     private final ArrayList<String> allowList;
     private final ArrayList<String> denyList;
-    private String sitemap;
+    private ArrayList<String> sitemaps;
     private long crawlDelayMillis;
     private final String[] myNames; // a list of own name lists
     private String agentName; // the name of the agent that was used to return the result
@@ -80,7 +80,7 @@ public final class RobotsTxtParser {
     protected RobotsTxtParser(final String[] myNames) {
         this.allowList = new ArrayList<String>(0);
         this.denyList = new ArrayList<String>(0);
-        this.sitemap = "";
+        this.sitemaps = new ArrayList<String>(0);
         this.crawlDelayMillis = 0;
         this.myNames = myNames;
         this.agentName = null;
@@ -128,10 +128,10 @@ public final class RobotsTxtParser {
                 // parse sitemap; if there are several sitemaps then take the first url
                 // TODO: support for multiple sitemaps
-                if (lineUpper.startsWith(ROBOTS_SITEMAP) && (this.sitemap == null || this.sitemap.isEmpty())) {
+                if (lineUpper.startsWith(ROBOTS_SITEMAP)) {
                     pos = line.indexOf(' ');
                     if (pos != -1) {
-                        this.sitemap = line.substring(pos).trim();
+                        this.sitemaps.add(line.substring(pos).trim());
                     }
                     continue lineparser;
                 }
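An example robots.txt (hypothetical) that the parser now handles fully, collecting every Sitemap line instead of only the first:

    User-agent: *
    Disallow: /private/

    Sitemap: https://example.org/sitemap.xml
    Sitemap: https://example.org/news-sitemap.xml

Both URLs end up in this.sitemaps in file order.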
@@ -258,8 +258,8 @@ public final class RobotsTxtParser {
         return this.agentName;
     }
 
-    protected String sitemap() {
-        return this.sitemap;
+    protected ArrayList<String> sitemap() {
+        return this.sitemaps;
     }
 
     protected ArrayList<String> allowList() {
