From 34354cf9b21bf7e08105045bda22c97aa0c62ec4 Mon Sep 17 00:00:00 2001 From: orbiter Date: Wed, 13 Jan 2010 23:19:45 +0000 Subject: [PATCH] added a servlet that has been removed in SVN 4881; this servlet is now splitted and will be used for a simple crawl start and a remote crawl monitor (not yet integrated into the interface) git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@6582 6c8d7289-2bf4-0310-a012-ef5d649a1542 --- htroot/CrawlMonitorRemoteStart.html | 59 ++++++++++++++++ htroot/CrawlMonitorRemoteStart.java | 97 +++++++++++++++++++++++++ htroot/CrawlProfileEditor_p.java | 2 +- htroot/IndexCreateDomainCrawl_p.html | 71 +++++++++++++++++++ htroot/IndexCreateDomainCrawl_p.java | 102 +++++++++++++++++++++++++++ 5 files changed, 330 insertions(+), 1 deletion(-) create mode 100644 htroot/CrawlMonitorRemoteStart.html create mode 100644 htroot/CrawlMonitorRemoteStart.java create mode 100644 htroot/IndexCreateDomainCrawl_p.html create mode 100644 htroot/IndexCreateDomainCrawl_p.java diff --git a/htroot/CrawlMonitorRemoteStart.html b/htroot/CrawlMonitorRemoteStart.html new file mode 100644 index 000000000..6afd5f0a8 --- /dev/null +++ b/htroot/CrawlMonitorRemoteStart.html @@ -0,0 +1,59 @@ + + + + YaCy '#[clientname]#': Monitor for remotely started global crawls + #%env/templates/metas.template%# + + + + + #%env/templates/header.template%# + #%env/templates/submenuIndexCreate.template%# +

Recently started remote crawls in progress

+ +

Remote crawl start points, crawl is ongoing

+ + + + + + + + + + #{otherCrawlStartInProgress}# + + + + + + + + + #{/otherCrawlStartInProgress}# +
Start TimePeer NameStart URLIntention/DescriptionDepthAccept '?' URLs
#[cre]##[peername]##[startURL]##[intention]##[generalDepth]##(crawlingQ)#no::yes#(/crawlingQ)#
+

Remote crawl start points, finished:

+ + + + + + + + + + #{otherCrawlStartFinished}# + + + + + + + + + #{/otherCrawlStartFinished}# +
Start TimePeer NameStart URLIntention/DescriptionDepthAccept '?' URLs
#[cre]##[peername]##[startURL]##[intention]##[generalDepth]##(crawlingQ)#no::yes#(/crawlingQ)#
+ + #%env/templates/footer.template%# + + \ No newline at end of file diff --git a/htroot/CrawlMonitorRemoteStart.java b/htroot/CrawlMonitorRemoteStart.java new file mode 100644 index 000000000..ffd60230e --- /dev/null +++ b/htroot/CrawlMonitorRemoteStart.java @@ -0,0 +1,97 @@ +// CrawlMonitorRemoteStart.java +// (C) 2004 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany +// first published 02.12.2004 as IndexCreate_p.java on http://yacy.net +// +// This is a part of YaCy, a peer-to-peer based web search engine +// +// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $ +// $LastChangedRevision: 1986 $ +// $LastChangedBy: orbiter $ +// +// LICENSE +// +// This program is free software; you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation; either version 2 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +import java.util.Iterator; + +import de.anomic.http.server.RequestHeader; +import de.anomic.search.Switchboard; +import de.anomic.search.SwitchboardConstants; +import de.anomic.server.serverObjects; +import de.anomic.server.serverSwitch; +import de.anomic.yacy.yacyNewsPool; +import de.anomic.yacy.yacyNewsRecord; +import de.anomic.yacy.yacySeed; + +public class CrawlMonitorRemoteStart { + + public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) { + // return variable that accumulates replacements + Switchboard sb = (Switchboard) env; + serverObjects prop = new serverObjects(); + + boolean dark = true; + + // create other peer crawl table using YaCyNews + Iterator recordIterator = sb.peers.newsPool.recordIterator(yacyNewsPool.INCOMING_DB, true); + int showedCrawl = 0; + yacyNewsRecord record; + yacySeed peer; + String peername; + while (recordIterator.hasNext()) { + record = recordIterator.next(); + if (record == null) continue; + if (record.category().equals(yacyNewsPool.CATEGORY_CRAWL_START)) { + peer = sb.peers.get(record.originator()); + if (peer == null) peername = record.originator(); else peername = peer.getName(); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_dark", dark ? 
"1" : "0"); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_cre", record.created().toString()); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_peername", peername); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_startURL", record.attributes().get("startURL").toString()); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_intention", record.attributes().get("intention").toString()); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_generalDepth", record.attributes().get("generalDepth")); + prop.put("otherCrawlStartInProgress_" + showedCrawl + "_crawlingQ", (record.attributes().get("crawlingQ").equals("true")) ? "1" : "0"); + showedCrawl++; + if (showedCrawl > 20) break; + } + } + prop.put("otherCrawlStartInProgress", showedCrawl); + + // finished remote crawls + recordIterator = sb.peers.newsPool.recordIterator(yacyNewsPool.PROCESSED_DB, true); + showedCrawl = 0; + while (recordIterator.hasNext()) { + record = (yacyNewsRecord) recordIterator.next(); + if (record == null) continue; + if (record.category().equals(yacyNewsPool.CATEGORY_CRAWL_START)) { + peer = sb.peers.get(record.originator()); + if (peer == null) peername = record.originator(); else peername = peer.getName(); + prop.put("otherCrawlStartFinished_" + showedCrawl + "_dark", dark ? "1" : "0"); + prop.put("otherCrawlStartFinished_" + showedCrawl + "_cre", record.created().toString()); + prop.putHTML("otherCrawlStartFinished_" + showedCrawl + "_peername", peername); + prop.putHTML("otherCrawlStartFinished_" + showedCrawl + "_startURL", record.attributes().get("startURL").toString()); + prop.put("otherCrawlStartFinished_" + showedCrawl + "_intention", record.attributes().get("intention").toString()); + prop.put("otherCrawlStartFinished_" + showedCrawl + "_generalDepth", record.attributes().get("generalDepth")); + prop.put("otherCrawlStartFinished_" + showedCrawl + "_crawlingQ", (record.attributes().get("crawlingQ").equals("true")) ? 
"1" : "0"); + showedCrawl++; + if (showedCrawl > 20) break; + } + } + prop.put("otherCrawlStartFinished", showedCrawl); + + // return rewrite properties + return prop; + } +} \ No newline at end of file diff --git a/htroot/CrawlProfileEditor_p.java b/htroot/CrawlProfileEditor_p.java index 11e167bcb..feb676da4 100644 --- a/htroot/CrawlProfileEditor_p.java +++ b/htroot/CrawlProfileEditor_p.java @@ -138,7 +138,7 @@ public class CrawlProfileEditor_p { } prop.put("profiles", count); selentry = sb.crawler.profilesActiveCrawls.getEntry(handle); - assert selentry.handle() != null; + assert selentry == null || selentry.handle() != null; // read post for change submit if ((post != null) && (selentry != null)) { if (post.containsKey("submit")) { diff --git a/htroot/IndexCreateDomainCrawl_p.html b/htroot/IndexCreateDomainCrawl_p.html new file mode 100644 index 000000000..215868cd3 --- /dev/null +++ b/htroot/IndexCreateDomainCrawl_p.html @@ -0,0 +1,71 @@ + + + + YaCy '#[clientname]#': Index Creation with a Web Crawl for a Single Domain + #%env/templates/metas.template%# + + + + + #%env/templates/header.template%# + #%env/templates/submenuIndexCreate.template%# +

Easy Crawl Start

+ +

+ Start Crawling Job:  + You can define URLs as start points for Web page crawling and start crawling here. + "Crawling" means that YaCy will download the given web-site, extract all links in it + and then download the content behind these links. + This is repeated as long as specified under "Crawling Depth". +

+ +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
AttributeValueDescription
Starting Point: + +
+
+ empty +
+ Enter here the start url of the web crawl. +
: + Wide: depth   |   + Complete Domain + + The range defines if the crawl shall consider a complete domain, or a wide crawl up to a specific depth. +
+
+ + #%env/templates/footer.template%# + + \ No newline at end of file diff --git a/htroot/IndexCreateDomainCrawl_p.java b/htroot/IndexCreateDomainCrawl_p.java new file mode 100644 index 000000000..8390a4398 --- /dev/null +++ b/htroot/IndexCreateDomainCrawl_p.java @@ -0,0 +1,102 @@ +// IndexCreateDomainCrawl_p +// (C) 2004 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany +// first published 02.12.2004 as IndexCreate_p.java on http://yacy.net +// +// This is a part of YaCy, a peer-to-peer based web search engine +// +// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $ +// $LastChangedRevision: 1986 $ +// $LastChangedBy: orbiter $ +// +// LICENSE +// +// This program is free software; you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation; either version 2 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +import java.util.Iterator; + +import de.anomic.http.server.RequestHeader; +import de.anomic.search.Switchboard; +import de.anomic.search.SwitchboardConstants; +import de.anomic.server.serverObjects; +import de.anomic.server.serverSwitch; +import de.anomic.yacy.yacyNewsPool; +import de.anomic.yacy.yacyNewsRecord; +import de.anomic.yacy.yacySeed; + +public class IndexCreateDomainCrawl_p { + + public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) { + // return variable that accumulates replacements + Switchboard sb = (Switchboard) env; + serverObjects prop = new serverObjects(); + + // define visible variables + prop.put("proxyPrefetchDepth", env.getConfig("proxyPrefetchDepth", "0")); + prop.put("crawlingDepth", Math.min(3, env.getConfigLong("crawlingDepth", 0))); + prop.put("crawlingFilter", env.getConfig("crawlingFilter", "0")); + + int crawlingIfOlder = (int) env.getConfigLong("crawlingIfOlder", -1); + prop.put("crawlingIfOlderCheck", (crawlingIfOlder == -1) ? 
"0" : "1"); + prop.put("crawlingIfOlderUnitYearCheck", "0"); + prop.put("crawlingIfOlderUnitMonthCheck", "0"); + prop.put("crawlingIfOlderUnitDayCheck", "0"); + prop.put("crawlingIfOlderUnitHourCheck", "0"); + prop.put("crawlingIfOlderUnitMinuteCheck", "0"); + if ((crawlingIfOlder == -1) || (crawlingIfOlder == Integer.MAX_VALUE)) { + prop.put("crawlingIfOlderNumber", "-1"); + prop.put("crawlingIfOlderUnitYearCheck", "1"); + } else if (crawlingIfOlder >= 60*24*365) { + prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24*365))); + prop.put("crawlingIfOlderUnitYearCheck", "1"); + } else if (crawlingIfOlder >= 60*24*30) { + prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24*30))); + prop.put("crawlingIfOlderUnitMonthCheck", "1"); + } else if (crawlingIfOlder >= 60*24) { + prop.put("crawlingIfOlderNumber", Math.round((float)crawlingIfOlder / (float)(60*24))); + prop.put("crawlingIfOlderUnitDayCheck", "1"); + } else if (crawlingIfOlder >= 60) { + prop.put("crawlingIfOlderNumber", Math.round(crawlingIfOlder / 60f)); + prop.put("crawlingIfOlderUnitHourCheck", "1"); + } else { + prop.put("crawlingIfOlderNumber", crawlingIfOlder); + prop.put("crawlingIfOlderUnitMinuteCheck", "1"); + } + int crawlingDomFilterDepth = (int) env.getConfigLong("crawlingDomFilterDepth", -1); + prop.put("crawlingDomFilterCheck", (crawlingDomFilterDepth == -1) ? "0" : "1"); + prop.put("crawlingDomFilterDepth", (crawlingDomFilterDepth == -1) ? 1 : crawlingDomFilterDepth); + int crawlingDomMaxPages = (int) env.getConfigLong("crawlingDomMaxPages", -1); + prop.put("crawlingDomMaxCheck", (crawlingDomMaxPages == -1) ? "0" : "1"); + prop.put("crawlingDomMaxPages", (crawlingDomMaxPages == -1) ? 10000 : crawlingDomMaxPages); + prop.put("crawlingQChecked", env.getConfigBool("crawlingQ", false) ? "1" : "0"); + prop.put("storeHTCacheChecked", env.getConfigBool("storeHTCache", false) ? 
"1" : "0"); + prop.put("indexingTextChecked", env.getConfigBool("indexText", false) ? "1" : "0"); + prop.put("indexingMediaChecked", env.getConfigBool("indexMedia", false) ? "1" : "0"); + prop.put("crawlOrderChecked", env.getConfigBool("crawlOrder", false) ? "1" : "0"); + + long LCbusySleep = Integer.parseInt(env.getConfig(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, "100")); + int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep); + prop.put("crawlingSpeedMaxChecked", (LCppm >= 1000) ? "1" : "0"); + prop.put("crawlingSpeedCustChecked", ((LCppm > 10) && (LCppm < 1000)) ? "1" : "0"); + prop.put("crawlingSpeedMinChecked", (LCppm <= 10) ? "1" : "0"); + prop.put("customPPMdefault", ((LCppm > 10) && (LCppm < 1000)) ? Integer.toString(LCppm) : ""); + + prop.put("xsstopwChecked", env.getConfigBool("xsstopw", false) ? "1" : "0"); + prop.put("xdstopwChecked", env.getConfigBool("xdstopw", false) ? "1" : "0"); + prop.put("xpstopwChecked", env.getConfigBool("xpstopw", false) ? "1" : "0"); + + // return rewrite properties + return prop; + } +} \ No newline at end of file