diff --git a/htroot/CrawlStartScanner_p.html b/htroot/CrawlStartScanner_p.html
index 0e2088642..2182dcfb0 100644
--- a/htroot/CrawlStartScanner_p.html
+++ b/htroot/CrawlStartScanner_p.html
@@ -46,9 +46,16 @@
Full Intranet Scan: #[intranethosts]#
- timeout: ms, bigrange
#(intranetHint)#::
Do not use intranet scan results, you are not in an intranet environment!
#(/intranetHint)#
+ Time-Out
+
+ ms
+
+ Subnet
+
+ /24 /16
+
Scan Cache
accumulate scan results with access type "granted" into scan cache (do not delete old scan result)
diff --git a/htroot/CrawlStartScanner_p.java b/htroot/CrawlStartScanner_p.java
index 2bec5cea8..cf538c261 100644
--- a/htroot/CrawlStartScanner_p.java
+++ b/htroot/CrawlStartScanner_p.java
@@ -18,7 +18,6 @@
* If not, see <http://www.gnu.org/licenses/>.
*/
-
import java.net.InetAddress;
import java.net.MalformedURLException;
import java.util.ConcurrentModificationException;
@@ -42,14 +41,18 @@ import de.anomic.data.WorkTables;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
-public class CrawlStartScanner_p {
+public class CrawlStartScanner_p
+{
private final static int CONCURRENT_RUNNER = 100;
- public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
+ public static serverObjects respond(
+ final RequestHeader header,
+ final serverObjects post,
+ final serverSwitch env) {
final serverObjects prop = new serverObjects();
- final Switchboard sb = (Switchboard)env;
+ final Switchboard sb = (Switchboard) env;
// clean up all search events
SearchEventCache.cleanupEvents(true);
@@ -66,71 +69,112 @@ public class CrawlStartScanner_p {
final Set<InetAddress> ips = Domains.myIntranetIPs();
prop.put("intranethosts", ips.toString());
prop.put("intranetHint", sb.isIntranetMode() ? 0 : 1);
- if (hosts.length() == 0) {
+ if ( hosts.length() == 0 ) {
InetAddress ip;
- if (sb.isIntranetMode()) {
- if (ips.size() > 0) ip = ips.iterator().next();
- else ip = Domains.dnsResolve("192.168.0.1");
+ if ( sb.isIntranetMode() ) {
+ if ( ips.size() > 0 ) {
+ ip = ips.iterator().next();
+ } else {
+ ip = Domains.dnsResolve("192.168.0.1");
+ }
} else {
ip = Domains.myPublicLocalIP();
- if (Domains.isThisHostIP(ip)) ip = sb.peers.mySeed().getInetAddress();
+ if ( Domains.isThisHostIP(ip) ) {
+ ip = sb.peers.mySeed().getInetAddress();
+ }
+ }
+ if ( ip != null ) {
+ hosts = ip.getHostAddress();
}
- if (ip != null) hosts = ip.getHostAddress();
}
prop.put("scanhosts", hosts);
// parse post requests
- if (post != null) {
+ if ( post != null ) {
int repeat_time = 0;
String repeat_unit = "seldays";
long validTime = 0;
// check scheduler
- if (post.get("rescan", "").equals("scheduler")) {
+ if ( post.get("rescan", "").equals("scheduler") ) {
repeat_time = post.getInt("repeat_time", -1);
repeat_unit = post.get("repeat_unit", "selminutes"); // selminutes, selhours, seldays
- if (repeat_unit.equals("selminutes")) validTime = repeat_time * 60 * 1000;
- if (repeat_unit.equals("selhours")) validTime = repeat_time * 60 * 60 * 1000;
- if (repeat_unit.equals("seldays")) validTime = repeat_time * 24 * 60 * 60 * 1000;
+ if ( repeat_unit.equals("selminutes") ) {
+ validTime = repeat_time * 60 * 1000;
+ }
+ if ( repeat_unit.equals("selhours") ) {
+ validTime = repeat_time * 60 * 60 * 1000;
+ }
+ if ( repeat_unit.equals("seldays") ) {
+ validTime = repeat_time * 24 * 60 * 60 * 1000;
+ }
}
- final boolean bigrange = post.getBoolean("bigrange", false);
+ final boolean bigrange = post.get("subnet", "24").equals("16");
// case: an IP range was given; scan the range for services and display result
- if (post.containsKey("scan") && "hosts".equals(post.get("source", ""))) {
+ if ( post.containsKey("scan") && "hosts".equals(post.get("source", "")) ) {
final Set<InetAddress> ia = new HashSet<InetAddress>();
- for (String host : hosts.split(",")) {
- if (host.startsWith("http://")) host = host.substring(7);
- if (host.startsWith("https://")) host = host.substring(8);
- if (host.startsWith("ftp://")) host = host.substring(6);
- if (host.startsWith("smb://")) host = host.substring(6);
- final int p = host.indexOf('/',0);
- if (p >= 0) host = host.substring(0, p);
+ for ( String host : hosts.split(",") ) {
+ if ( host.startsWith("http://") ) {
+ host = host.substring(7);
+ }
+ if ( host.startsWith("https://") ) {
+ host = host.substring(8);
+ }
+ if ( host.startsWith("ftp://") ) {
+ host = host.substring(6);
+ }
+ if ( host.startsWith("smb://") ) {
+ host = host.substring(6);
+ }
+ final int p = host.indexOf('/', 0);
+ if ( p >= 0 ) {
+ host = host.substring(0, p);
+ }
ia.add(Domains.dnsResolve(host));
}
final Scanner scanner = new Scanner(ia, CONCURRENT_RUNNER, timeout);
- if (post.get("scanftp", "").equals("on")) scanner.addFTP(bigrange);
- if (post.get("scanhttp", "").equals("on")) scanner.addHTTP(bigrange);
- if (post.get("scanhttps", "").equals("on")) scanner.addHTTPS(bigrange);
- if (post.get("scansmb", "").equals("on")) scanner.addSMB(bigrange);
+ if ( post.get("scanftp", "").equals("on") ) {
+ scanner.addFTP(bigrange);
+ }
+ if ( post.get("scanhttp", "").equals("on") ) {
+ scanner.addHTTP(bigrange);
+ }
+ if ( post.get("scanhttps", "").equals("on") ) {
+ scanner.addHTTPS(bigrange);
+ }
+ if ( post.get("scansmb", "").equals("on") ) {
+ scanner.addSMB(bigrange);
+ }
scanner.start();
scanner.terminate();
- if ("on".equals(post.get("accumulatescancache", "")) && !"scheduler".equals(post.get("rescan", ""))) {
+ if ( "on".equals(post.get("accumulatescancache", ""))
+ && !"scheduler".equals(post.get("rescan", "")) ) {
Scanner.scancacheExtend(scanner, validTime);
} else {
Scanner.scancacheReplace(scanner, validTime);
}
}
- if (post.containsKey("scan") && "intranet".equals(post.get("source", ""))) {
+ if ( post.containsKey("scan") && "intranet".equals(post.get("source", "")) ) {
final Scanner scanner = new Scanner(Domains.myIntranetIPs(), CONCURRENT_RUNNER, timeout);
- if ("on".equals(post.get("scanftp", ""))) scanner.addFTP(bigrange);
- if ("on".equals(post.get("scanhttp", ""))) scanner.addHTTP(bigrange);
- if ("on".equals(post.get("scanhttps", ""))) scanner.addHTTPS(bigrange);
- if ("on".equals(post.get("scansmb", ""))) scanner.addSMB(bigrange);
+ if ( "on".equals(post.get("scanftp", "")) ) {
+ scanner.addFTP(bigrange);
+ }
+ if ( "on".equals(post.get("scanhttp", "")) ) {
+ scanner.addHTTP(bigrange);
+ }
+ if ( "on".equals(post.get("scanhttps", "")) ) {
+ scanner.addHTTPS(bigrange);
+ }
+ if ( "on".equals(post.get("scansmb", "")) ) {
+ scanner.addSMB(bigrange);
+ }
scanner.start();
scanner.terminate();
- if ("on".equals(post.get("accumulatescancache", "")) && !"scheduler".equals(post.get("rescan", ""))) {
+ if ( "on".equals(post.get("accumulatescancache", ""))
+ && !"scheduler".equals(post.get("rescan", "")) ) {
Scanner.scancacheExtend(scanner, validTime);
} else {
Scanner.scancacheReplace(scanner, validTime);
@@ -138,68 +182,90 @@ public class CrawlStartScanner_p {
}
// check crawl request
- if (post.containsKey("crawl")) {
+ if ( post.containsKey("crawl") ) {
// make a pk/url mapping
final Iterator<Map.Entry<Scanner.Service, Access>> se = Scanner.scancacheEntries();
- final Map<byte[], DigestURI> pkmap = new TreeMap<byte[], DigestURI>(Base64Order.enhancedCoder);
- while (se.hasNext()) {
+ final Map<byte[], DigestURI> pkmap =
+ new TreeMap<byte[], DigestURI>(Base64Order.enhancedCoder);
+ while ( se.hasNext() ) {
final Scanner.Service u = se.next().getKey();
DigestURI uu;
try {
uu = new DigestURI(u.url());
pkmap.put(uu.hash(), uu);
- } catch (final MalformedURLException e) {
+ } catch ( final MalformedURLException e ) {
Log.logException(e);
}
}
// search for crawl start requests in this mapping
- for (final Map.Entry<String, String> entry: post.entrySet()) {
- if (entry.getValue().startsWith("mark_")) {
- final byte [] pk = entry.getValue().substring(5).getBytes();
+ for ( final Map.Entry<String, String> entry : post.entrySet() ) {
+ if ( entry.getValue().startsWith("mark_") ) {
+ final byte[] pk = entry.getValue().substring(5).getBytes();
final DigestURI url = pkmap.get(pk);
- if (url != null) {
- String path = "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
+ if ( url != null ) {
+ String path =
+ "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
path += "&crawlingURL=" + url.toNormalform(true, false);
- WorkTables.execAPICall("localhost", (int) sb.getConfigLong("port", 8090), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), path, pk);
+ WorkTables.execAPICall(
+ "localhost",
+ (int) sb.getConfigLong("port", 8090),
+ sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""),
+ path,
+ pk);
}
}
}
}
// check scheduler
- if ("scheduler".equals(post.get("rescan", ""))) {
+ if ( "scheduler".equals(post.get("rescan", "")) ) {
// store this call as api call
- if (repeat_time > 0) {
+ if ( repeat_time > 0 ) {
// store as scheduled api call
- sb.tables.recordAPICall(post, "CrawlStartScanner_p.html", WorkTables.TABLE_API_TYPE_CRAWLER, "network scanner for hosts: " + hosts, repeat_time, repeat_unit.substring(3));
+ sb.tables.recordAPICall(
+ post,
+ "CrawlStartScanner_p.html",
+ WorkTables.TABLE_API_TYPE_CRAWLER,
+ "network scanner for hosts: " + hosts,
+ repeat_time,
+ repeat_unit.substring(3));
}
// execute the scan results
- if (Scanner.scancacheSize() > 0) {
+ if ( Scanner.scancacheSize() > 0 ) {
// make a comment cache
final Map<byte[], String> apiCommentCache = WorkTables.commentCache(sb);
String urlString;
DigestURI u;
try {
- final Iterator<Map.Entry<Scanner.Service, Access>> se = Scanner.scancacheEntries();
+ final Iterator<Map.Entry<Scanner.Service, Access>> se =
+ Scanner.scancacheEntries();
Map.Entry<Scanner.Service, Access> host;
- while (se.hasNext()) {
+ while ( se.hasNext() ) {
host = se.next();
try {
u = new DigestURI(host.getKey().url());
urlString = u.toNormalform(true, false);
- if (host.getValue() == Access.granted && Scanner.inIndex(apiCommentCache, urlString) == null) {
- String path = "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
+ if ( host.getValue() == Access.granted
+ && Scanner.inIndex(apiCommentCache, urlString) == null ) {
+ String path =
+ "/Crawler_p.html?createBookmark=off&xsstopw=off&crawlingDomMaxPages=10000&intention=&range=domain&indexMedia=on&recrawl=nodoubles&xdstopw=off&storeHTCache=on&sitemapURL=&repeat_time=7&crawlingQ=on&cachePolicy=iffresh&indexText=on&crawlingMode=url&mustnotmatch=&crawlingDomFilterDepth=1&crawlingDomFilterCheck=off&crawlingstart=Start%20New%20Crawl&xpstopw=off&repeat_unit=seldays&crawlingDepth=99";
path += "&crawlingURL=" + urlString;
- WorkTables.execAPICall("localhost", (int) sb.getConfigLong("port", 8090), sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""), path, u.hash());
+ WorkTables.execAPICall(
+ "localhost",
+ (int) sb.getConfigLong("port", 8090),
+ sb.getConfig(SwitchboardConstants.ADMIN_ACCOUNT_B64MD5, ""),
+ path,
+ u.hash());
}
- } catch (final MalformedURLException e) {
+ } catch ( final MalformedURLException e ) {
Log.logException(e);
}
}
- } catch (final ConcurrentModificationException e) {}
+ } catch ( final ConcurrentModificationException e ) {
+ }
}
}