From 534f09e92b2ae7984c401f4f8d8b2921dd97b6cd Mon Sep 17 00:00:00 2001
From: luccioman
-Use Case: You get entries here, if you start a local crawl on the 'Index Creation'-Page and check the
-'Do Remote Indexing'-flag. Every page that a remote peer indexes upon this peer's request
-is reported back and can be monitored here.
+Use Case: You get entries here, if you start a local crawl on the 'Advanced Crawler' page and check the
+'Do Remote Indexing'-flag, and if you checked the 'Accept Remote Crawl Requests'-flag on the 'Remote Crawling' page.
+
+Every page that a remote peer indexes upon this peer's request is reported back and can be monitored here.
+ #(remoteCrawlerDisabled)#::No remote crawl results can currently be added to the local index as the remote crawler is disabled on this peer.
 This index transfer was initiated by your peer by doing a search query.
@@ -59,7 +61,8 @@
 These pages had been indexed by your peer, but the crawl was initiated by a remote peer. This is the 'mirror'-case of process (1).
-Use Case: This list may fill if you check the 'Accept remote crawling requests'-flag on the 'Index Create' page
+Use Case: This list may fill if you check the 'Accept Remote Crawl Requests'-flag on the 'Remote Crawling' page
+ #(remoteCrawlerDisabled)#::The remote crawler is currently disabled
 These records had been imported from surrogate files in DATA/SURROGATES/in
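Background for the '#(remoteCrawlerDisabled)#' markers added above: YaCy pages under htroot are rendered from .html templates whose '#(key)#branch-for-0::branch-for-1#(/key)#' sections are switched by a 0/1 (or boolean) property that the matching .java servlet writes into its serverObjects, which is exactly what the CrawlResults.java hunk below does with 'process_remoteCrawlerDisabled'. The following stand-alone Java sketch only illustrates that branch convention; it is not YaCy's template engine, and the class and method names are invented for the example.

    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class TemplateSwitchDemo {

        // Replaces #(key)#branch0::branch1#(/key)# with branch0 when the property is "0"
        // (or missing) and with branch1 when it is "1", mimicking the convention used above.
        static String render(final String template, final Map<String, String> props) {
            final Pattern p = Pattern.compile("#\\((\\w+)\\)#(.*?)::(.*?)#\\(/\\1\\)#", Pattern.DOTALL);
            final Matcher m = p.matcher(template);
            final StringBuilder out = new StringBuilder();
            while (m.find()) {
                final boolean branch1 = "1".equals(props.getOrDefault(m.group(1), "0"));
                m.appendReplacement(out, Matcher.quoteReplacement(branch1 ? m.group(3) : m.group(2)));
            }
            m.appendTail(out);
            return out.toString();
        }

        public static void main(final String[] args) {
            final String template = "#(remoteCrawlerDisabled)#::The remote crawler is currently disabled#(/remoteCrawlerDisabled)#";
            System.out.println(render(template, Map.of("remoteCrawlerDisabled", "1"))); // warning shown
            System.out.println(render(template, Map.of("remoteCrawlerDisabled", "0"))); // empty line
        }
    }

Run as is, the first call prints the warning (property set to "1", remote crawler disabled) and the second prints an empty line, mirroring why the new warning stays hidden while the remote crawler is enabled.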
diff --git a/htroot/CrawlResults.java b/htroot/CrawlResults.java
index 3429ac31b..262ed172f 100644
--- a/htroot/CrawlResults.java
+++ b/htroot/CrawlResults.java
@@ -50,6 +50,7 @@ import net.yacy.kelondro.util.FileUtils;
 import net.yacy.peers.Seed;
 import net.yacy.repository.Blacklist;
 import net.yacy.search.Switchboard;
+import net.yacy.search.SwitchboardConstants;
 import net.yacy.search.schema.CollectionSchema;
 import net.yacy.server.serverObjects;
 import net.yacy.server.serverSwitch;
@@ -348,6 +349,7 @@ public class CrawlResults {
         }
 
         prop.put("process", tabletype.getCode());
+        prop.put("process_remoteCrawlerDisabled", !sb.getConfigBool(SwitchboardConstants.CRAWLJOB_REMOTE, false));
         // return rewrite properties
         return prop;
     }
diff --git a/htroot/CrawlStartExpert.html b/htroot/CrawlStartExpert.html
index 66be62a9f..57ca869d2 100644
--- a/htroot/CrawlStartExpert.html
+++ b/htroot/CrawlStartExpert.html
@@ -98,10 +98,17 @@
     }
 
     // Remote crawl
+    var remoteCrawlerDisabledInfo = document.getElementById('remoteCrawlerDisabledInfo');
     if ($('#crawlOrder').isChecked()) {
+        if(remoteCrawlerDisabledInfo != null) {
+            remoteCrawlerDisabledInfo.className = '';
+        }
         $('#intention').enable();
         if (cId === "crawlOrder") { $('#intention').focus(); }
     } else {
+        if(remoteCrawlerDisabledInfo != null) {
+            remoteCrawlerDisabledInfo.className = 'hidden';
+        }
         $('#intention').disable();
     }
 }
@@ -623,9 +630,16 @@
 so they can omit starting a crawl with the same start point.
+Remote crawl results won't be added to the local index as the remote crawler is disabled on this peer.
+You can activate it in the Remote Crawl Configuration page.
diff --git a/htroot/CrawlStartExpert.java b/htroot/CrawlStartExpert.java
index f11f403e1..6f463218c 100644
--- a/htroot/CrawlStartExpert.java
+++ b/htroot/CrawlStartExpert.java
@@ -620,7 +620,10 @@ public class CrawlStartExpert {
         // Do Remote Indexing?
         if (sb.isP2PMode()) {
             prop.put("remoteindexing", 1);
-            prop.put("remoteindexing_crawlOrderChecked", env.getConfigBool("crawlOrder", true) ? 1 : 0);
+            prop.put("remoteindexing_remoteCrawlerDisabled",
+                    !sb.getConfigBool(SwitchboardConstants.CRAWLJOB_REMOTE, false));
+            prop.put("remoteindexing_remoteCrawlerDisabled_crawlOrderChecked", env.getConfigBool("crawlOrder", true));
+            prop.put("remoteindexing_crawlOrderChecked", env.getConfigBool("crawlOrder", true));
             prop.put("remoteindexing_intention", "");
         } else {
             prop.put("remoteindexing", 0);
@@ -632,7 +635,10 @@ public class CrawlStartExpert {
                     post.getBoolean("indexMedia") ? 1 : 0);
             if (sb.isP2PMode()) {
                 prop.put("remoteindexing", 1);
-                prop.put("remoteindexing_crawlOrderChecked", post.getBoolean("crawlOrder") ? 1 : 0);
+                prop.put("remoteindexing_remoteCrawlerDisabled",
+                        !sb.getConfigBool(SwitchboardConstants.CRAWLJOB_REMOTE, false));
+                prop.put("remoteindexing_remoteCrawlerDisabled_crawlOrderChecked", post.getBoolean("crawlOrder"));
+                prop.put("remoteindexing_crawlOrderChecked", post.getBoolean("crawlOrder"));
                 prop.put("remoteindexing_intention", post.get("intention", ""));
             } else {
                 prop.put("remoteindexing", 0);
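A note on the duplicated 'crawlOrderChecked' puts in the CrawlStartExpert.java hunks above: template sub-keys carry the prefix of every enclosing switch, so the patch publishes the same checkbox state once as 'remoteindexing_crawlOrderChecked' and once as 'remoteindexing_remoteCrawlerDisabled_crawlOrderChecked', making it readable inside the new 'remoteCrawlerDisabled' section as well. The helper below is hypothetical and not part of this patch ('TemplateProps' and 'putUnderPrefixes' are invented names); it only reuses the boolean prop.put overload already used in the hunks above.

    // Hypothetical helper, not part of this patch: publishes one boolean under several
    // template key prefixes at once, which the two added crawlOrderChecked lines above
    // do by hand.
    import net.yacy.server.serverObjects;

    final class TemplateProps {
        private TemplateProps() {}

        static void putUnderPrefixes(final serverObjects prop, final String subKey,
                final boolean value, final String... prefixes) {
            for (final String prefix : prefixes) {
                prop.put(prefix + "_" + subKey, value);
            }
        }
    }

    // Usage equivalent to the two added crawlOrderChecked lines of the first hunk:
    // TemplateProps.putUnderPrefixes(prop, "crawlOrderChecked",
    //         env.getConfigBool("crawlOrder", true),
    //         "remoteindexing", "remoteindexing_remoteCrawlerDisabled");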
diff --git a/htroot/Crawler_p.html b/htroot/Crawler_p.html
index c3a7fb6b7..a2c9fd24a 100644
--- a/htroot/Crawler_p.html
+++ b/htroot/Crawler_p.html
@@ -227,6 +227,13 @@
 window.setInterval("setTableSize()", 1000);
 #(/info)#
+#(wontReceiptRemoteResults)#::
+You asked for remote indexing, but remote crawl results won't be added to the local index as the remote crawler is currently disabled on this peer.
+You can activate it in the Remote Crawl Configuration page.
+#(/wontReceiptRemoteResults)#
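The new '#(wontReceiptRemoteResults)#' switch above needs a servlet-side counterpart in Crawler_p.java, which this excerpt does not include. The sketch below is an assumption only: the property key comes from the template above and the CRAWLJOB_REMOTE lookup from the earlier hunks, while the class name, method name and exact condition are guesses.

    // Hypothetical sketch, NOT shown in this excerpt: Crawler_p.java would have to drive
    // the new wontReceiptRemoteResults switch roughly like this.
    import net.yacy.search.Switchboard;
    import net.yacy.search.SwitchboardConstants;
    import net.yacy.server.serverObjects;

    final class WontReceiptRemoteResultsSketch {
        private WontReceiptRemoteResultsSketch() {}

        // 'crawlOrder' stands for the submitted 'Do Remote Indexing' flag of the crawl start form.
        static void put(final Switchboard sb, final serverObjects prop, final boolean crawlOrder) {
            final boolean remoteCrawlerDisabled = !sb.getConfigBool(SwitchboardConstants.CRAWLJOB_REMOTE, false);
            // Warn only when remote indexing was requested but cannot be served locally.
            prop.put("wontReceiptRemoteResults", crawlOrder && remoteCrawlerDisabled);
        }
    }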