superfluous there; it can be changed in the scheduler servlet. It is also confusing in the presence of the delete option, which will be implemented next.
- removed the unused crawl start servlet
- some refactoring to make the time parser reusable
parent 2e7219f9fd
commit 1c66de4bd4
@ -1,71 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
  <title>YaCy '#[clientname]#': Index Creation with a Web Crawl for a Single Domain</title>
  #%env/templates/metas.template%#
  <script type="text/javascript" src="/js/ajax.js"></script>
  <script type="text/javascript" src="/js/IndexCreate.js"></script>
</head>
<body id="IndexCreate">
  #%env/templates/header.template%#
  #%env/templates/submenuIndexCreate.template%#
  <h2>Easy Crawl Start</h2>

  <p id="startCrawling">
    <strong>Start Crawling Job:</strong>
    You can define URLs as start points for Web page crawling and start crawling here.
    "Crawling" means that YaCy will download the given web-site, extract all links in it
    and then download the content behind these links.
    This is repeated as long as specified under "Crawling Depth".
  </p>

  <form action="Crawler_p.html" method="post" enctype="multipart/form-data" accept-charset="UTF-8">
    <input type="hidden" name="crawlingFilter" value=".*" />
    <input type="hidden" name="crawlingIfOlderCheck" value="off" />
    <input type="hidden" name="crawlingDomFilterCheck" value="off" />
    <input type="hidden" name="crawlingDomMaxCheck" value="off" />
    <input type="hidden" name="crawlingQ" value="off" />
    <input type="hidden" name="storeHTCache" value="on" />
    <input type="hidden" name="indexText" value="on" />
    <input type="hidden" name="indexMedia" value="on" />
    <input type="hidden" name="crawlOrder" value="on" />
    <input type="hidden" name="intention" value="simple web crawl" />
    <input type="hidden" name="xsstopw" value="off" />
    <table border="0" cellpadding="5" cellspacing="1">
      <tr class="TableHeader">
        <td><strong>Attribut</strong></td>
        <td><strong>Value</strong></td>
        <td><strong>Description</strong></td>
      </tr>
      <tr valign="top" class="TableCellSummary">
        <td>Starting Point:</td>
        <td>
          <input name="crawlingURL" type="text" size="41" maxlength="256" value="http://" onkeypress="changed()" />
          <span id="robotsOK"></span><br />
          <span id="title"><br/></span>
          <img src="/env/grafics/empty.gif" name="ajax" alt="empty" />
        </td>
        <td>
          Enter here the start url of the web crawl.
        </td>
      </tr>
      <tr valign="top" class="TableCellLight">
        <td><label for="crawlingDepth">Crawling Range</label>:</td>
        <td>
          <input type="radio" name="range" value="wide" checked="checked" />Wide: depth <input name="crawlingDepth" id="crawlingDepth" type="text" size="2" maxlength="2" value="#[crawlingDepth]#" /> |
          <input type="radio" name="range" value="domain" />Complete Domain
        </td>
        <td>
          The range defines if the crawl shall consider a complete domain, or a wide crawl up to a specific depth.
        </td>
      </tr>

      <tr valign="top" class="TableCellLight">
        <td colspan="3"><input type="submit" name="crawlingstart" value="Start New Distributed Crawl" class="submitready" style="width:240px;"/></td>
      </tr>
    </table>
  </form>

  #%env/templates/footer.template%#
</body>
</html>
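The help text in the removed page describes the crawl loosely: fetch the start URL, extract its links, fetch the pages behind those links, and repeat until the configured "Crawling Depth" is reached. A minimal, self-contained sketch of that idea in plain Java (naive regex link extraction, absolute http(s) links only; an illustration of the concept, not YaCy's actual crawler) could look like this:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class DepthLimitedCrawlSketch {

    // naive link extractor: only absolute http(s) URLs in double-quoted href attributes
    private static final Pattern HREF =
            Pattern.compile("href\\s*=\\s*\"(https?://[^\"]+)\"", Pattern.CASE_INSENSITIVE);

    private final HttpClient client = HttpClient.newHttpClient();
    private final Set<String> visited = new HashSet<>();

    // fetch the page, extract its links, and follow each link one level deeper
    void crawl(String url, int depth) throws Exception {
        if (depth < 0 || !visited.add(url)) return;   // stop when depth is exhausted, skip revisits
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        String body = client.send(request, HttpResponse.BodyHandlers.ofString()).body();
        System.out.println("fetched " + url + " (" + body.length() + " bytes, depth " + depth + ")");
        Matcher m = HREF.matcher(body);
        while (m.find()) {
            crawl(m.group(1), depth - 1);
        }
    }

    public static void main(String[] args) throws Exception {
        new DepthLimitedCrawlSketch().crawl("http://example.org/", 1);
    }
}

A real crawler additionally honors robots.txt, applies URL filters, and throttles requests; the form above delegates all of that to Crawler_p.html via its hidden fields.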
@ -1,96 +0,0 @@
// IndexCreateDomainCrawl_p
// (C) 2004 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 02.12.2004 as IndexCreate_p.java on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

import net.yacy.cora.protocol.RequestHeader;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;

public class IndexCreateDomainCrawl_p {

    public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, final serverSwitch env) {
        // return variable that accumulates replacements
        //Switchboard sb = (Switchboard) env;
        serverObjects prop = new serverObjects();

        // define visible variables
        prop.put("proxyPrefetchDepth", env.getConfig("proxyPrefetchDepth", "0"));
        prop.put("crawlingDepth", Math.min(3, env.getConfigLong("crawlingDepth", 0)));
        prop.put("crawlingFilter", env.getConfig("crawlingFilter", "0"));

        int crawlingIfOlder = (int) env.getConfigLong("crawlingIfOlder", -1);
        prop.put("crawlingIfOlderCheck", (crawlingIfOlder == -1) ? "0" : "1");
        prop.put("crawlingIfOlderUnitYearCheck", "0");
        prop.put("crawlingIfOlderUnitMonthCheck", "0");
        prop.put("crawlingIfOlderUnitDayCheck", "0");
        prop.put("crawlingIfOlderUnitHourCheck", "0");
        prop.put("crawlingIfOlderUnitMinuteCheck", "0");
        if ((crawlingIfOlder == -1) || (crawlingIfOlder == Integer.MAX_VALUE)) {
            prop.put("crawlingIfOlderNumber", "-1");
            prop.put("crawlingIfOlderUnitYearCheck", "1");
        } else if (crawlingIfOlder >= 60*24*365) {
            prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24*365)));
            prop.put("crawlingIfOlderUnitYearCheck", "1");
        } else if (crawlingIfOlder >= 60*24*30) {
            prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24*30)));
            prop.put("crawlingIfOlderUnitMonthCheck", "1");
        } else if (crawlingIfOlder >= 60*24) {
            prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24)));
            prop.put("crawlingIfOlderUnitDayCheck", "1");
        } else if (crawlingIfOlder >= 60) {
            prop.put("crawlingIfOlderNumber", Math.round(crawlingIfOlder / 60f));
            prop.put("crawlingIfOlderUnitHourCheck", "1");
        } else {
            prop.put("crawlingIfOlderNumber", crawlingIfOlder);
            prop.put("crawlingIfOlderUnitMinuteCheck", "1");
        }
        int crawlingDomFilterDepth = (int) env.getConfigLong("crawlingDomFilterDepth", -1);
        prop.put("crawlingDomFilterCheck", (crawlingDomFilterDepth == -1) ? "0" : "1");
        prop.put("crawlingDomFilterDepth", (crawlingDomFilterDepth == -1) ? 1 : crawlingDomFilterDepth);
        int crawlingDomMaxPages = (int) env.getConfigLong("crawlingDomMaxPages", -1);
        prop.put("crawlingDomMaxCheck", (crawlingDomMaxPages == -1) ? "0" : "1");
        prop.put("crawlingDomMaxPages", (crawlingDomMaxPages == -1) ? 10000 : crawlingDomMaxPages);
        prop.put("crawlingQChecked", env.getConfigBool("crawlingQ", false) ? "1" : "0");
        prop.put("storeHTCacheChecked", env.getConfigBool("storeHTCache", false) ? "1" : "0");
        prop.put("indexingTextChecked", env.getConfigBool("indexText", false) ? "1" : "0");
        prop.put("indexingMediaChecked", env.getConfigBool("indexMedia", false) ? "1" : "0");
        prop.put("crawlOrderChecked", env.getConfigBool("crawlOrder", false) ? "1" : "0");

        long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 100L);
        int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep);
        prop.put("crawlingSpeedMaxChecked", (LCppm >= 1000) ? "1" : "0");
        prop.put("crawlingSpeedCustChecked", ((LCppm > 10) && (LCppm < 1000)) ? "1" : "0");
        prop.put("crawlingSpeedMinChecked", (LCppm <= 10) ? "1" : "0");
        prop.put("customPPMdefault", ((LCppm > 10) && (LCppm < 1000)) ? Integer.toString(LCppm) : "");

        prop.put("xsstopwChecked", env.getConfigBool("xsstopw", false) ? "1" : "0");
        prop.put("xdstopwChecked", env.getConfigBool("xdstopw", false) ? "1" : "0");
        prop.put("xpstopwChecked", env.getConfigBool("xpstopw", false) ? "1" : "0");

        // return rewrite properties
        return prop;
    }
}
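The crawlingIfOlder block in the deleted servlet is the kind of logic the commit message means by making the time parser reusable: an age given in minutes is reduced to the largest unit that fits (year, month, day, hour, minute). A stand-alone helper using the same thresholds might look like this (hypothetical names, a sketch only; not the actual refactored YaCy code):

// reduces an age in minutes to the largest unit that fits,
// with the same thresholds as the servlet (365-day year, 30-day month)
public final class TimeUnitSketch {

    enum Unit { YEAR, MONTH, DAY, HOUR, MINUTE }

    static final class Age {
        final long amount;
        final Unit unit;
        Age(long amount, Unit unit) { this.amount = amount; this.unit = unit; }
        @Override public String toString() { return amount + " " + unit; }
    }

    static Age fromMinutes(long minutes) {
        if (minutes < 0) return new Age(-1, Unit.YEAR);   // "unset" sentinel, as in the servlet
        if (minutes >= 60L * 24 * 365) return new Age(Math.round(minutes / (60d * 24 * 365)), Unit.YEAR);
        if (minutes >= 60L * 24 * 30)  return new Age(Math.round(minutes / (60d * 24 * 30)), Unit.MONTH);
        if (minutes >= 60L * 24)       return new Age(Math.round(minutes / (60d * 24)), Unit.DAY);
        if (minutes >= 60L)            return new Age(Math.round(minutes / 60d), Unit.HOUR);
        return new Age(minutes, Unit.MINUTE);
    }

    public static void main(String[] args) {
        System.out.println(fromMinutes(60 * 24 * 45));   // 45 days -> 2 MONTH (rounded)
        System.out.println(fromMinutes(90));             // -> 2 HOUR (rounded)
    }
}

The crawl-speed fields in the same servlet are a simpler conversion: LCppm = 60000 / LCbusySleep, so a busy-sleep of 600 ms corresponds to 100 pages per minute.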