- enhancements in surrogate reading

- better display of map in location search

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@7636 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 14 years ago
parent 1ff9947f91
commit cb6f709a16

@@ -5,6 +5,12 @@
 #%env/templates/metas.template%#
 <script type="text/javascript" src="http://openlayers.org/api/OpenLayers.js"></script>
 <script type="text/javascript" src="http://www.openstreetmap.org/openlayers/OpenStreetMap.js"></script>
+<script type="text/javascript">
+function adaptHeight() {
+  document.getElementById('map').style.height = (document.documentElement.clientHeight - 130) + "px";
+}
+window.onresize = adaptHeight;
+</script>
 <script type="text/javascript">
 var map;
 var searchLayer_md = null;
@@ -21,10 +27,11 @@
 controls:[
 new OpenLayers.Control.Navigation(),
 new OpenLayers.Control.PanZoomBar(),
+new OpenLayers.Control.ZoomBox(),
 new OpenLayers.Control.LayerSwitcher(),
 new OpenLayers.Control.Attribution()]
 });
 //layerMaplint = new OpenLayers.Layer.OSM.Maplint("Maplint");
 layerWMS = new OpenLayers.Layer.WMS( "OpenLayers WMS", "http://labs.metacarta.com/wms/vmap0", {layers: 'basic'} );
 layerMapnik = new OpenLayers.Layer.OSM.Mapnik("Mapnik");
@@ -35,12 +42,13 @@
 map.addLayer(layerCycleMap);
 //map.addLayer(layerMaplint);
 map.addLayer(layerWMS);
 map.addControl(new OpenLayers.Control.LayerSwitcher());
-map.setCenter(new OpenLayers.LonLat(15,30) // Center of the map
+map.setCenter(new OpenLayers.LonLat(9,48) // Center of the map
 .transform(
 new OpenLayers.Projection("EPSG:4326"), // transform from WGS 1984
 new OpenLayers.Projection("EPSG:900913") // to Spherical Mercator Projection
-), 2 // Zoom level
+), 5 // Zoom level
 );
 #(initsearch)#::
@@ -56,10 +64,15 @@
 var query = document.getElementById('query').value.replace(' ', '+');
 if (searchLayer_md != null) searchLayer_md.destroy();
 if (searchLayer_co != null) searchLayer_co.destroy();
-searchLayer_co = new OpenLayers.Layer.GeoRSS('GeoRSS', path_cosearch + query, {'icon':marker_co});
+var center = map.getCenter().transform(new OpenLayers.Projection("EPSG:900913"), new OpenLayers.Projection("EPSG:4326"));
+searchLayer_co = new OpenLayers.Layer.GeoRSS('GeoRSS', path_cosearch + query + '&lon=' + center.lon + '&lat=' + center.lat + '&zoom=' + map.getZoom(), {'icon':marker_co});
 map.addLayer(searchLayer_co);
 searchLayer_md = new OpenLayers.Layer.GeoRSS('GeoRSS', path_mdsearch + query, {'icon':marker_md});
 map.addLayer(searchLayer_md);
+//map.zoomToExtent(searchLayer_co.getExtent());
+//map.panTo(searchLayer_co.getExtent().getCenterLonLat());
+//map.setCenter(searchLayer_co.getExtent().getLonLatFromViewPortPx, 5);
+//map.setCenter(searchLayer_co.getExtent().getCenterLonLat(), map.getZoomForExtent(searchLayer_co.getExtent(), true));
 document.getElementById('apilink').setAttribute('href', 'yacysearch_location.rss?query=' + query);
 }
</script> </script>
@@ -92,6 +105,9 @@ To see a list of all APIs, please visit the <a href="http://www.yacy-websuche.de
 <input type="submit" onclick="search(); return false;" value="search" onsubmit="search(); return false;" />
 </fieldset>
 </form><br/>
-<div id="map" style="width:800px; height:600px"></div>
+<div id="map" style="clear:both; width:100%; height:720px"></div>
+<script type="text/javascript">
+adaptHeight();
+</script>
 </body>
 </html>

@@ -29,6 +29,7 @@ import net.yacy.cora.protocol.RequestHeader;
 import net.yacy.cora.services.SearchSRURSS;
 import net.yacy.document.LibraryProvider;
 import net.yacy.document.geolocalization.Location;
+import de.anomic.crawler.CrawlProfile;
 import de.anomic.search.Switchboard;
 import de.anomic.search.SwitchboardConstants;
 import de.anomic.server.serverCore;
@@ -62,8 +63,8 @@ public class yacysearch_location {
 boolean search_publisher = alltext || post.get("dom", "").indexOf("publisher") >= 0;
 boolean search_creator = alltext || post.get("dom", "").indexOf("creator") >= 0;
 boolean search_subject = alltext || post.get("dom", "").indexOf("subject") >= 0;
-long maximumTime = post.getLong("maximumTime", 3000);
-int maximumRecords = post.getInt("maximumRecords", 200);
+long maximumTime = post.getLong("maximumTime", 5000);
+int maximumRecords = post.getInt("maximumRecords", 3000);
 //i.e. http://localhost:8090/yacysearch_location.kml?query=berlin&maximumTime=2000&maximumRecords=100
 int placemarkCounter = 0;
@@ -93,7 +94,7 @@ public class yacysearch_location {
 // get a queue of search results
 String rssSearchServiceURL = "http://127.0.0.1:" + sb.getConfig("port", "8090") + "/yacysearch.rss";
 BlockingQueue<RSSMessage> results = new LinkedBlockingQueue<RSSMessage>();
-SearchSRURSS.searchSRURSS(results, rssSearchServiceURL, query, maximumTime, Integer.MAX_VALUE, false, false, null);
+SearchSRURSS.searchSRURSS(results, rssSearchServiceURL, query, maximumTime, Integer.MAX_VALUE, CrawlProfile.CacheStrategy.NOCACHE, false, null);
 // take the results and compute some locations
 RSSMessage message;

@@ -319,7 +319,7 @@ public class ResultFetcher {
 // check if we have enough
 if (result.sizeAvailable() >= this.neededResults) {
-Log.logWarning("ResultFetcher", result.sizeAvailable() + " = result.sizeAvailable() >= this.neededResults = " + this.neededResults);
+//Log.logWarning("ResultFetcher", result.sizeAvailable() + " = result.sizeAvailable() >= this.neededResults = " + this.neededResults);
 break;
 }

@@ -1345,7 +1345,7 @@ public final class Switchboard extends serverSwitch {
 }
 public void processSurrogate(final InputStream is, String name) throws IOException {
-SurrogateReader reader = new SurrogateReader(is, 3);
+SurrogateReader reader = new SurrogateReader(is, 100);
 Thread readerThread = new Thread(reader, name);
 readerThread.start();
 DCEntry surrogate;

@@ -80,6 +80,7 @@ import net.yacy.repository.Blacklist;
 import org.apache.http.entity.mime.content.ContentBody;
+import de.anomic.crawler.CrawlProfile;
 import de.anomic.crawler.ResultURLs;
 import de.anomic.crawler.ResultURLs.EventOrigin;
 import de.anomic.search.ContentDomain;
@@ -369,7 +370,7 @@ public final class yacyClient {
 }
 }
-public static RSSFeed search(final yacySeed targetSeed, String query, boolean verify, boolean global, long timeout, int startRecord, int maximumRecords) throws IOException {
+public static RSSFeed search(final yacySeed targetSeed, String query, CrawlProfile.CacheStrategy verify, boolean global, long timeout, int startRecord, int maximumRecords) throws IOException {
 String address = (targetSeed == null || targetSeed == Switchboard.getSwitchboard().peers.mySeed()) ? "localhost:" + Switchboard.getSwitchboard().getConfig("port", "8090") : targetSeed.getClusterAddress();
 String urlBase = "http://" + address + "/yacysearch.rss";
 return SearchSRURSS.loadSRURSS(urlBase, query, timeout, startRecord, maximumRecords, verify, global, null);

@@ -30,6 +30,8 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
+import de.anomic.crawler.CrawlProfile;
 import net.yacy.cora.document.RSSMessage;
 import net.yacy.cora.protocol.http.HTTPClient;
 import net.yacy.cora.storage.ConcurrentScoreMap;
@@ -145,7 +147,7 @@ public class SearchHub {
  * @param verify
  * @param global
  */
-public static void addSRURSSServices(SearchHub search, String[] rssServices, int count, boolean verify, boolean global, String userAgent) {
+public static void addSRURSSServices(SearchHub search, String[] rssServices, int count, CrawlProfile.CacheStrategy verify, boolean global, String userAgent) {
 for (String service: rssServices) {
 SearchSRURSS accumulator = new SearchSRURSS(search, service, count, verify, global, userAgent);
 accumulator.start();
@@ -161,7 +163,7 @@
 for (String s: args) sb.append(s).append(' ');
 String query = sb.toString().trim();
 SearchHub search = new SearchHub(query, 10000);
-addSRURSSServices(search, SRURSSServicesList, 100, false, false, "searchhub");
+addSRURSSServices(search, SRURSSServicesList, 100, CrawlProfile.CacheStrategy.CACHEONLY, false, "searchhub");
 try {Thread.sleep(100);} catch (InterruptedException e1) {}
 search.waitTermination();
 ScoreMap<String> result = search.getResults();

@@ -36,6 +36,8 @@ import java.util.concurrent.TimeUnit;
 import org.apache.http.entity.mime.content.ContentBody;
+import de.anomic.crawler.CrawlProfile;
 import net.yacy.cora.document.MultiProtocolURI;
 import net.yacy.cora.document.RSSFeed;
 import net.yacy.cora.document.RSSMessage;
@@ -45,13 +47,13 @@ import net.yacy.cora.protocol.http.HTTPConnector;
 public class SearchSRURSS extends Thread implements SearchAccumulator {
-private final static int recordsPerSession = 10;
+private final static int recordsPerSession = 100;
 final String urlBase;
 final String query;
 final long timeoutInit;
 final int maximumRecordsInit;
-final boolean verify;
+final CrawlProfile.CacheStrategy verify;
 final boolean global;
 final Map<RSSMessage, List<Integer>> result;
 final String userAgent;
@@ -64,7 +66,7 @@ public class SearchSRURSS extends Thread implements SearchAccumulator {
 final long timeoutInit,
 final String urlBase,
 final int maximumRecordsInit,
-final boolean verify,
+final CrawlProfile.CacheStrategy verify,
 final boolean global,
 final String userAgent) {
 this.results = new LinkedBlockingQueue<RSSMessage>();
@@ -82,7 +84,7 @@ public class SearchSRURSS extends Thread implements SearchAccumulator {
 final SearchHub search,
 final String urlBase,
 final int maximumRecordsInit,
-final boolean verify,
+final CrawlProfile.CacheStrategy verify,
 final boolean global,
 final String userAgent) {
 this.results = new LinkedBlockingQueue<RSSMessage>();
@@ -120,7 +122,7 @@ public class SearchSRURSS extends Thread implements SearchAccumulator {
 final String query,
 final long timeoutInit,
 final int maximumRecordsInit,
-final boolean verify,
+final CrawlProfile.CacheStrategy verify,
 final boolean global,
 final String userAgent) {
 Thread job = new Thread() {
@@ -178,7 +180,7 @@ public class SearchSRURSS extends Thread implements SearchAccumulator {
 long timeout,
 int startRecord,
 int maximumRecords,
-boolean verify,
+CrawlProfile.CacheStrategy cacheStrategy,
 boolean global,
 String userAgent) throws IOException {
 MultiProtocolURI uri = null;
@@ -195,7 +197,7 @@ public class SearchSRURSS extends Thread implements SearchAccumulator {
 parts.put("query", UTF8.StringBody(query));
 parts.put("startRecord", UTF8.StringBody(Integer.toString(startRecord)));
 parts.put("maximumRecords", UTF8.StringBody(Long.toString(maximumRecords)));
-parts.put("verify", UTF8.StringBody(verify ? "true" : "false"));
+parts.put("verify", UTF8.StringBody(cacheStrategy.toName()));
 parts.put("resource", UTF8.StringBody(global ? "global" : "local"));
 parts.put("nav", UTF8.StringBody("none"));
 result = HTTPConnector.getConnector(userAgent == null ? MultiProtocolURI.yacybotUserAgent : userAgent).post(new MultiProtocolURI(rssSearchServiceURL), (int) timeout, uri.getHost(), parts);
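
Note on the recurring signature change above: the boolean verify flag is replaced by a CrawlProfile.CacheStrategy value throughout (NOCACHE in yacysearch_location, CACHEONLY in the SearchHub test main), and loadSRURSS now sends cacheStrategy.toName() as the verify parameter. A minimal caller sketch for the new signature; the peer URL, query, and startRecord value are illustrative placeholders taken from the example URL in this diff, not part of the change itself:

// Hypothetical caller sketch after this commit: run a local RSS search
// without touching the cache. Uses only what the diff shows:
// loadSRURSS(urlBase, query, timeout, startRecord, maximumRecords,
// CrawlProfile.CacheStrategy, global, userAgent) returning an RSSFeed.
RSSFeed feed = SearchSRURSS.loadSRURSS(
    "http://localhost:8090/yacysearch.rss",  // example peer address
    "berlin",                                // example query
    5000,                                    // timeout in milliseconds
    0,                                       // startRecord (assumed starting offset)
    100,                                     // maximumRecords
    CrawlProfile.CacheStrategy.NOCACHE,      // was: boolean verify
    false,                                   // global = false: local search only
    null);                                   // null = default yacybot user agent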
