* provide option to delete cached snippet fetching failures

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@7429 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
f1ori 14 years ago
parent 9f38c0023d
commit fafab7a8fe

htroot/ConfigHTCache_p.html
@@ -33,9 +33,10 @@
<dl>
<dt class="TableCellDark">Cache Deletion</dt>
<dd><input type="checkbox" name="deleteCache" id="deleteCache"
onclick="x=document.getElementById('deleteCache').checked;document.getElementById('deleteRobots').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteRobots').disabled=c;"
onclick="x=document.getElementById('deleteCache').checked;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteSearchFl').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteSearchFl').disabled=c;"
/>Delete HTTP &amp; FTP Cache<br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" />Delete robots.txt Cache<br/><br/><br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" />Delete robots.txt Cache<br/>
<input type="checkbox" name="deleteSearchFl" id="deleteSearchFl" disabled="disabled" />Delete cached snippet-fetching failures during search<br/><br/><br/>
<input type="submit" name="deletecomplete" value="Delete"/>
</dd>
</dl>

htroot/ConfigHTCache_p.java
@@ -27,9 +27,11 @@
// if the shell's current path is HTROOT
import java.io.File;
+import java.io.IOException;
import net.yacy.cora.protocol.RequestHeader;
+import de.anomic.data.WorkTables;
import de.anomic.http.client.Cache;
import de.anomic.search.Switchboard;
import de.anomic.search.SwitchboardConstants;
@@ -38,7 +40,7 @@ import de.anomic.server.serverSwitch;
public class ConfigHTCache_p {
-public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
+public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) throws IOException {
// return variable that accumulates replacements
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
@@ -70,6 +72,9 @@ public class ConfigHTCache_p {
if ("on".equals(post.get("deleteRobots", ""))) {
sb.robots.clear();
}
if ("on".equals(post.get("deleteSearchFl", ""))) {
sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
}
}
prop.put("HTCachePath", env.getConfig(SwitchboardConstants.HTCACHE_PATH, SwitchboardConstants.HTCACHE_PATH_DEFAULT));

htroot/IndexControlRWIs_p.html
@@ -41,11 +41,12 @@
<dl>
<dt class="TableCellDark">Index Deletion</dt>
<dd><input type="checkbox" name="deleteIndex" id="deleteIndex"
onclick="x=document.getElementById('deleteIndex').checked;document.getElementById('deleteCache').checked=x;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteCrawlQueues').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteCache').disabled=c;document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteCrawlQueues').disabled=c;"
onclick="x=document.getElementById('deleteIndex').checked;document.getElementById('deleteCache').checked=x;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteCrawlQueues').checked=x;c='disabled';document.getElementById('deleteSearchFl').checked=x;if(x){c='';}document.getElementById('deleteCache').disabled=c;document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteCrawlQueues').disabled=c;document.getElementById('deleteSearchFl').disabled=c;"
/><label for="deleteIndex">Delete Search Index</label><br/>
<input type="checkbox" name="deleteCrawlQueues" id="deleteCrawlQueues" disabled="disabled" /><label for="deleteCrawlQueues">Stop Crawler and delete Crawl Queues</label><br/>
<input type="checkbox" name="deleteCache" id="deleteCache" disabled="disabled" /><label for="deleteCache">Delete HTTP &amp; FTP Cache</label><br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" /><label for="deleteRobots">Delete robots.txt Cache</label><br/><br/><br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" /><label for="deleteRobots">Delete robots.txt Cache</label><br/>
<input type="checkbox" name="deleteSearchFl" id="deleteSearchFl" disabled="disabled" /><label for="deleteSearchFl">Delete cached snippet-fetching failures during search</label><br/><br/><br/>
<input type="submit" name="deletecomplete" value="Delete"/>
</dd>
</dl>

htroot/IndexControlRWIs_p.java
@@ -55,6 +55,7 @@ import net.yacy.repository.Blacklist;
import de.anomic.crawler.CrawlProfile;
import de.anomic.data.ListManager;
+import de.anomic.data.WorkTables;
import de.anomic.http.client.Cache;
import de.anomic.search.QueryParams;
import de.anomic.search.RankingProcess;
@@ -71,7 +72,7 @@ import de.anomic.yacy.dht.PeerSelection;
public class IndexControlRWIs_p {
-public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
+public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) throws IOException {
// return variable that accumulates replacements
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
@@ -160,6 +161,9 @@ public class IndexControlRWIs_p {
if (post.get("deleteRobots", "").equals("on")) {
sb.robots.clear();
}
if (post.get("deleteSearchFl", "").equals("on")) {
sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
}
post.remove("deletecomplete");
}
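For reference, both servlets handle the new checkbox in the same way: when the form is submitted with deleteSearchFl checked, the work table that records snippet-fetching failures is cleared. The following is a minimal, illustrative sketch of that pattern in isolation. The Switchboard, serverObjects and WorkTables types, the "deleteSearchFl" parameter and the table name are taken from the diff above; the wrapper class and method name are made up for the example.

// Illustrative sketch only: mirrors the handling added in ConfigHTCache_p and
// IndexControlRWIs_p. The real servlets receive these objects from YaCy's
// server framework; this wrapper exists just to show the pattern on its own.
import java.io.IOException;

import de.anomic.data.WorkTables;
import de.anomic.search.Switchboard;
import de.anomic.server.serverObjects;

public class SnippetFailureCleanupSketch {
    public static void clearSnippetFailures(final Switchboard sb, final serverObjects post) throws IOException {
        // the browser sends "on" only when the checkbox was ticked in the form
        if ("on".equals(post.get("deleteSearchFl", ""))) {
            // drop all cached snippet-fetching failures recorded during search
            sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
        }
    }
}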
