made crawl start pages public, since they do not reveal individual information and they are not the servlet that actually starts the crawl (which is Crawler_p.html).
pull/1/head
orbiter 11 years ago
parent e41db47cac
commit d29b6db270
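Background for the change: YaCy's servlet naming convention treats pages whose base name ends in "_p" as protected, i.e. they require admin authentication, while pages without the suffix are served publicly. Dropping the suffix from CrawlStartExpert_p and CrawlStartSite_p therefore exposes the crawl-start forms without login; submitting a crawl still goes through the protected Crawler_p.html. Below is a minimal, hypothetical sketch of such a suffix-based access check; the class and method names (AccessPolicy, isProtected) are illustrative and do not appear in the YaCy sources.

    // Illustrative only: a suffix-based access check in the spirit of the
    // "_p means protected" convention this commit relies on.
    public final class AccessPolicy {

        private AccessPolicy() {}

        /** Pages whose base name ends in "_p" require admin authentication. */
        public static boolean isProtected(final String path) {
            final String name = path.endsWith(".html")
                    ? path.substring(0, path.length() - ".html".length())
                    : path;
            return name.endsWith("_p");
        }

        public static void main(final String[] args) {
            System.out.println(isProtected("CrawlStartExpert_p.html")); // true  (before this commit)
            System.out.println(isProtected("CrawlStartExpert.html"));   // false (after this commit)
            System.out.println(isProtected("Crawler_p.html"));          // true  (starting a crawl stays protected)
        }
    }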

@@ -35,7 +35,7 @@ import net.yacy.search.schema.CollectionSchema;
 import net.yacy.server.serverObjects;
 import net.yacy.server.serverSwitch;
-public class CrawlStartExpert_p {
+public class CrawlStartExpert {
 public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
 // return variable that accumulates replacements
@@ -543,27 +543,6 @@ public class CrawlStartExpert_p {
 }
 }
-/* problaby unused (no corresponding entry in template)
-prop.put("proxyPrefetchDepth", env.getConfig("proxyPrefetchDepth", "0"));
-final int crawlingDomFilterDepth = env.getConfigInt("crawlingDomFilterDepth", -1);
-prop.put("crawlingDomFilterCheck", (crawlingDomFilterDepth == -1) ? "0" : "1");
-prop.put("crawlingDomFilterDepth", (crawlingDomFilterDepth == -1) ? 1 : crawlingDomFilterDepth);
-prop.put("followFramesChecked", env.getConfigBool("followFrames", true) ? "1" : "0");
-final long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 100L);
-final int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep);
-prop.put("crawlingSpeedMaxChecked", (LCppm >= 1000) ? "1" : "0");
-prop.put("crawlingSpeedCustChecked", ((LCppm > 10) && (LCppm < 1000)) ? "1" : "0");
-prop.put("crawlingSpeedMinChecked", (LCppm <= 10) ? "1" : "0");
-prop.put("customPPMdefault", ((LCppm > 10) && (LCppm < 1000)) ? Integer.toString(LCppm) : "");
-prop.put("xsstopwChecked", env.getConfigBool("xsstopw", true) ? "1" : "0");
-prop.put("xdstopwChecked", env.getConfigBool("xdstopw", true) ? "1" : "0");
-prop.put("xpstopwChecked", env.getConfigBool("xpstopw", true) ? "1" : "0");
-*/
 // return rewrite properties
 return prop;
 }
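The commented-out block deleted above had derived a pages-per-minute (PPM) figure from the crawler's busy-sleep interval in order to pre-select a speed option in the template. A small self-contained sketch of that arithmetic, reusing the variable names from the removed code (the env/prop plumbing is omitted, and the busy-sleep value is just an example):

    // Sketch of the busy-sleep -> pages-per-minute conversion from the removed block.
    // 60000 ms per minute divided by the sleep between fetches gives the PPM ceiling.
    public final class PpmDemo {
        public static void main(final String[] args) {
            final long LCbusySleep = 100L; // ms of sleep between local-crawl fetches (example value)
            final int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep);
            // 60000 / 100 = 600 PPM, so the "custom" speed option would have been pre-selected
            System.out.println("PPM = " + LCppm);
            System.out.println("max checked:    " + (LCppm >= 1000));
            System.out.println("custom checked: " + (LCppm > 10 && LCppm < 1000));
            System.out.println("min checked:    " + (LCppm <= 10));
        }
    }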

@@ -28,9 +28,9 @@ import net.yacy.cora.protocol.RequestHeader;
 import net.yacy.server.serverObjects;
 import net.yacy.server.serverSwitch;
-public class CrawlStartSite_p {
+public class CrawlStartSite {
 public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
-return CrawlStartExpert_p.respond(header, post, env);
+return CrawlStartExpert.respond(header, post, env);
 }
 }

@@ -68,7 +68,7 @@
 <ul class="nav nav-sidebar menugroup">
 <li><h3>First Steps</h3></li>
 <li><a href="ConfigBasic.html" class="MenuItemLink">Use Case &amp; Account</a></li>
-<li><a href="CrawlStartSite_p.html" class="MenuItemLink lock">Load Web Pages (Crawler)</a></li>
+<li><a href="CrawlStartSite.html" class="MenuItemLink">Load Web Pages (Crawler)</a></li>
 <li><a href="Performance_p.html" class="MenuItemLink lock">RAM/Disk Usage &amp; Updates</a></li>
 </ul>
@@ -84,7 +84,7 @@
 <ul class="nav nav-sidebar menugroup">
 <li><h3>Production</h3></li>
-<li><a href="CrawlStartExpert_p.html" class="MenuItemLink lock">Advanced Crawler</a></li>
+<li><a href="CrawlStartExpert.html" class="MenuItemLink">Advanced Crawler</a></li>
 <li><a href="Load_RSS_p.html" class="MenuItemLink lock">Content Importer</a></li>
 <li><a href="Table_API_p.html" class="MenuItemLink lock">Process Scheduler</a></li>
 <li><a href="Tables_p.html?table=robots&count=100" class="MenuItemLink lock">Target Analysis</a></li>

@@ -1,7 +1,7 @@
 <div class="SubMenu">
 <h3>Use Case &amp; Accounts</h3>
 <ul class="SubMenu">
-<li><a href="CrawlStartSite_p.html" class="MenuItemLink lock">Site Crawling</a></li>
+<li><a href="CrawlStartSite.html" class="MenuItemLink">Site Crawling</a></li>
 <li><a href="ConfigParser.html" class="MenuItemLink lock">Parser Configuration</a></li>
 </ul>
 </div>

@@ -5,8 +5,7 @@
 <div class="SubMenugroup">
 <h3>Crawler/Spider</h3>
 <ul class="SubMenu">
-<li><a href="CrawlStartExpert_p.html" class="MenuItemLink lock">Crawl Start (Expert)</a></li>
-<li><a href="CrawlStartScanner_p.html" class="MenuItemLink lock">Network Scanner</a></li>
+<li><a href="CrawlStartExpert.html" class="MenuItemLink">Crawl Start (Expert)</a></li>
 <li><a href="Load_MediawikiWiki.html" class="MenuItemLink">Crawling of MediaWikis</a></li>
 <li><a href="Load_PHPBB3.html" class="MenuItemLink">Crawling of phpBB3 Forums</a></li>
 </ul>
@@ -15,6 +14,7 @@
 <div class="SubMenugroup">
 <h3>Network Harvesting</h3>
 <ul class="SubMenu">
+<li><a href="CrawlStartScanner_p.html" class="MenuItemLink lock">Network Scanner</a></li>
 <li><a href="RemoteCrawl_p.html" class="MenuItemLink lock">Remote Crawling</a></li>
 <li><a href="ProxyIndexingMonitor_p.html" class="MenuItemLink lock">Scraping Proxy</a></li>
 </ul>
