Added log output at crawl start showing a GET URL that can be used to start the same crawl again (e.g. from a cronjob).

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6566 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 15 years ago
parent bc96d74813
commit 909a4f91c7

@ -130,6 +130,9 @@ public class WatchCrawler_p {
if (sb.peers == null) { if (sb.peers == null) {
prop.put("info", "3"); prop.put("info", "3");
} else { } else {
// log a GET url for this crawl start for possible use in cronjobs
Log.logInfo("CRAWLSTART-URL", "http://localhost:" + sb.getConfig("port", "8080") + "/WatchCrawler_p.html?" + post.toString());
// set new properties // set new properties
final boolean fullDomain = post.get("range", "wide").equals("domain"); // special property in simple crawl start final boolean fullDomain = post.get("range", "wide").equals("domain"); // special property in simple crawl start
final boolean subPath = post.get("range", "wide").equals("subpath"); // special property in simple crawl start final boolean subPath = post.get("range", "wide").equals("subpath"); // special property in simple crawl start

@ -353,4 +353,20 @@ public class serverObjects extends HashMap<String, String> implements Cloneable
return super.clone(); return super.clone();
} }
/**
 * Formats the map entries as an HTTP GET query string: "key=value" pairs
 * joined with '&amp;'. Values are URL-encoded so the resulting string can be
 * appended to a request URL directly (e.g. the crawl-start URL logged for
 * cronjob re-use).
 * @return the query string, or the empty string if the map holds no entries
 */
public String toString() {
    if (this.size() == 0) return "";
    final StringBuilder param = new StringBuilder();
    for (final Map.Entry<String, String> entry: this.entrySet()) {
        // separator goes between pairs; avoids the trim-last-char trick
        if (param.length() > 0) param.append('&');
        param.append(entry.getKey());
        param.append('=');
        try {
            // encode the value so '&', '=', spaces and embedded URLs
            // cannot break or be misparsed in the generated query string
            param.append(java.net.URLEncoder.encode(entry.getValue(), "UTF-8"));
        } catch (final java.io.UnsupportedEncodingException e) {
            // UTF-8 support is mandated by the Java platform spec;
            // fall back to the raw value rather than failing toString()
            param.append(entry.getValue());
        }
    }
    return param.toString();
}
} }

Loading…
Cancel
Save