longer timeout for remote crawl queries

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@5573 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 16 years ago
parent 072dd01ac8
commit 65a1de6c05

@@ -54,7 +54,7 @@ public class rct_p {
         if (post.containsKey("retrieve")) {
             final String peerhash = post.get("peer", null);
             final yacySeed seed = (peerhash == null) ? null : sb.webIndex.seedDB.getConnected(peerhash);
-            final RSSFeed feed = (seed == null) ? null : yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 20, 10000);
+            final RSSFeed feed = (seed == null) ? null : yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 20, 60000);
             if (feed != null) {
                 for (final RSSMessage item: feed) {
                     //System.out.println("URL=" + item.getLink() + ", desc=" + item.getDescription() + ", pubDate=" + item.getPubDate());
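The change in rct_p.java raises the retrieve handler's remote query timeout from 10 to 60 seconds, matching the other call sites below. Since the 60000 ms value is now repeated across callers, a small sketch of hoisting it into a shared constant (the holder type and its name are hypothetical, not part of this commit):

    // Hypothetical refactoring sketch, not in this commit: one shared
    // constant for the remote crawl query timeout used by all callers.
    public interface RemoteCrawlTimeouts {
        int QUERY_TIMEOUT_MS = 60000; // 60 s, as passed to queryRemoteCrawlURLs
    }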

@@ -363,7 +363,7 @@ public class CrawlQueues {
         if (seed == null) return false;
         // we know a peer which should provide remote crawl entries. load them now.
-        final RSSFeed feed = yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 30, 5000);
+        final RSSFeed feed = yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 30, 60000);
         if (feed == null || feed.size() == 0) {
             // something is wrong with this provider. To prevent that we get not stuck with this peer
             // we remove it from the peer list
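CrawlQueues gets the same treatment, going from 5 to 60 seconds; on a null or empty feed the peer is dropped so the loader does not keep retrying a broken provider. A hedged sketch of that control flow, where removeProvider is a hypothetical placeholder for the peer-list cleanup the comment above describes:

    // Sketch under assumptions: removeProvider(...) is a placeholder name,
    // not an actual YaCy method shown in this diff.
    final RSSFeed feed = yacyClient.queryRemoteCrawlURLs(sb.webIndex.seedDB, seed, 30, 60000);
    if (feed == null || feed.size() == 0) {
        removeProvider(seed); // forget this peer so we do not get stuck on it
        return false;
    }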

@@ -391,7 +391,7 @@ public final class yacyClient {
         // send request
         try {
             /* a long time-out is needed */
-            final byte[] result = wput("http://" + target.getClusterAddress() + "/yacy/urls.xml", target.getHexHash() + ".yacyh", post, 60000);
+            final byte[] result = wput("http://" + target.getClusterAddress() + "/yacy/urls.xml", target.getHexHash() + ".yacyh", post, (int) maxTime);
             final RSSReader reader = RSSReader.parse(result);
             if (reader == null) {
                 yacyCore.log.logWarning("yacyClient.queryRemoteCrawlURLs failed asking peer '" + target.getName() + "': probably bad response from remote peer (1), reader == null");
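The new argument (int) maxTime implies that queryRemoteCrawlURLs now takes its timeout as a parameter instead of hard-coding 60000 in the wput call; the changed declaration itself lies outside this hunk. A sketch of the assumed signature, where only maxTime is confirmed by the diff and the remaining names and types are inferred from the call sites above:

    // Assumed shape of the changed method; parameter names other than
    // maxTime are guesses based on how the callers invoke it.
    public static RSSFeed queryRemoteCrawlURLs(
            final yacySeedDB seedDB,  // sb.webIndex.seedDB at the call sites
            final yacySeed target,    // peer asked for remote crawl URLs
            final int maxCount,       // 20 in rct_p.java, 30 in CrawlQueues
            final long maxTime) {     // timeout in ms, cast to int for wput
        // body unchanged apart from the wput timeout, as shown in the hunk
        return null; // placeholder; the real method parses the RSS response
    }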
