diff --git a/htroot/Network.html b/htroot/Network.html
index 5f88e5f75..07b21ce76 100644
--- a/htroot/Network.html
+++ b/htroot/Network.html
@@ -215,7 +215,7 @@
|
lightred font |
: |
- passiv peers ( < 5 hour passiv time) |
+ passive peers ( < 5 hours passive time) |
|
diff --git a/htroot/WatchCrawler_p.java b/htroot/WatchCrawler_p.java
index 86dcfe6e4..99a312b89 100644
--- a/htroot/WatchCrawler_p.java
+++ b/htroot/WatchCrawler_p.java
@@ -271,11 +271,14 @@ public class WatchCrawler_p {
yacyURL crawlURL = new yacyURL("file://" + file.toString(), null);
plasmaCrawlProfile.entry profile = switchboard.profilesActiveCrawls.newEntry(fileName, crawlURL, newcrawlingfilter, newcrawlingfilter, newcrawlingdepth, newcrawlingdepth, crawlingIfOlder, crawlingDomFilterDepth, crawlingDomMaxPages, crawlingQ, indexText, indexMedia, storeHTCache, true, crawlOrder, xsstopw, xdstopw, xpstopw);
+ // pause the local crawl while the links from the file are stacked
+ switchboard.pauseCrawlJob(plasmaSwitchboard.CRAWLJOB_LOCAL_CRAWL);
+
// loop through the contained links
- Iterator interator = hyperlinks.entrySet().iterator();
+ Iterator linkiterator = hyperlinks.entrySet().iterator();
int c = 0;
- while (interator.hasNext()) {
- Map.Entry e = (Map.Entry) interator.next();
+ while (linkiterator.hasNext()) {
+ Map.Entry e = (Map.Entry) linkiterator.next();
String nexturlstring = (String) e.getKey();
if (nexturlstring == null) continue;
@@ -296,7 +299,7 @@ public class WatchCrawler_p {
}
// enqueuing the url for crawling
- String rejectReason = switchboard.sbStackCrawlThread.stackCrawl(nexturlstring, null, yacyCore.seedDB.mySeed().hash, (String)e.getValue(), new Date(), 1, profile);
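+ // stack the links read from the file with depth 0 (previously 1) so they count as crawl start points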
+ String rejectReason = switchboard.sbStackCrawlThread.stackCrawl(nexturlstring, null, yacyCore.seedDB.mySeed().hash, (String)e.getValue(), new Date(), 0, profile);
// if something failed add the url into the errorURL list
if (rejectReason == null) {
@@ -320,7 +323,8 @@ public class WatchCrawler_p {
prop.put("info_error", e.getMessage());
e.printStackTrace();
}
- }
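+ // resume the local crawl that was paused above, now that all links from the file have been stacked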
+ switchboard.continueCrawlJob(plasmaSwitchboard.CRAWLJOB_LOCAL_CRAWL);
+ }
} else if (crawlingMode.equals(CRAWLING_MODE_SITEMAP)) {
String sitemapURLStr = null;
try {
diff --git a/htroot/proxymsg/error.html b/htroot/proxymsg/error.html
index 1520e6875..5076c8f41 100644
--- a/htroot/proxymsg/error.html
+++ b/htroot/proxymsg/error.html
@@ -1,7 +1,7 @@
- YaCy: Proxy Message
+ YaCy: Error Message
#%env/templates/metas.template%#
diff --git a/source/de/anomic/plasma/plasmaCrawlStacker.java b/source/de/anomic/plasma/plasmaCrawlStacker.java
index 25db7200e..1880a64c7 100644
--- a/source/de/anomic/plasma/plasmaCrawlStacker.java
+++ b/source/de/anomic/plasma/plasmaCrawlStacker.java
@@ -288,9 +288,7 @@ public final class plasmaCrawlStacker {
// check if ip is local ip address
checkInterruption(); // TODO: this is protocol specific
InetAddress hostAddress = serverDomains.dnsResolve(nexturl.getHost());
- if(this.sb.getConfig("yacyDebugMode", "true").equals("true")){
- //just ignore the check in debugmode (useful for tor(.eff.org)
- }else if (hostAddress == null) {
+ if (hostAddress == null) {
// if a http proxy is configured name resolution may not work
if (this.sb.remoteProxyConfig == null || !this.sb.remoteProxyConfig.useProxy()) {
reason = plasmaCrawlEURL.DENIED_UNKNOWN_HOST;