- enhanced index deletion in IndexControlRWIs_p: also delete the robots.txt database and the cache if requested

- added options to select which parts to delete
- added deletion to the new ConfigHTCache_p servlet

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@7294 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
orbiter 15 years ago
parent 3e303db1fa
commit 790e0b1894

@@ -28,6 +28,21 @@
</fieldset>
</form>
</form>
<form action="ConfigHTCache_p.html" method="post" enctype="multipart/form-data">
<fieldset><legend>Cleanup</legend>
<dl>
<dt class="TableCellDark">Cache Deletion</dt>
<dd><input type="checkbox" name="deleteCache" id="deleteCache"
onclick="x=document.getElementById('deleteCache').checked;document.getElementById('deleteRobots').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteRobots').disabled=c;"
/>Delete HTTP &amp; FTP Cache<br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" />Delete robots.txt Cache<br/><br/><br/>
<input type="submit" name="deletecomplete" value="Delete"/>
</dd>
</dl>
</fieldset>
</form>
#%env/templates/footer.template%#
</body>
</html>
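
The cleanup form above submits plain checkbox parameters: a browser only sends a checkbox field when the box is checked, and its default submitted value is "on", which is why the servlet below compares against "on". A minimal, self-contained sketch of that convention in plain Java (the helper and class names are made up for illustration and are not part of the commit):

import java.util.HashMap;
import java.util.Map;

public class CheckboxPostSketch {
    // hypothetical helper: a checked checkbox arrives as "on", an unchecked one is simply absent
    static boolean isChecked(final Map<String, String> post, final String name) {
        return "on".equals(post.getOrDefault(name, ""));
    }

    public static void main(final String[] args) {
        final Map<String, String> post = new HashMap<String, String>();
        post.put("deletecomplete", "Delete"); // the submit button
        post.put("deleteCache", "on");        // box was ticked
        // "deleteRobots" is absent because its box was left unchecked (or disabled)
        System.out.println(isChecked(post, "deleteCache"));  // true  -> clear the HTCache
        System.out.println(isChecked(post, "deleteRobots")); // false -> keep the robots.txt cache
    }
}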

@@ -29,9 +29,9 @@
import java.io.File;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import de.anomic.http.client.Cache;
import de.anomic.search.Switchboard;
import de.anomic.search.SwitchboardConstants;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
@@ -40,13 +40,13 @@ public class ConfigHTCache_p {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
// return variable that accumulates replacements
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
String oldProxyCachePath, newProxyCachePath;
int newProxyCacheSize;
if (post != null && post.containsKey("set")) try {
if (post != null && post.containsKey("set")) {
// proxyCache - check and create the directory
oldProxyCachePath = env.getConfig(SwitchboardConstants.HTCACHE_PATH, SwitchboardConstants.HTCACHE_PATH_DEFAULT);
newProxyCachePath = post.get("HTCachePath", SwitchboardConstants.HTCACHE_PATH_DEFAULT);
@@ -63,8 +63,15 @@ public class ConfigHTCache_p {
if (newProxyCacheSize < 4) { newProxyCacheSize = 4; }
env.setConfig(SwitchboardConstants.PROXY_CACHE_SIZE, newProxyCacheSize);
Cache.setMaxCacheSize(newProxyCacheSize * 1024 * 1024);
} catch (final Exception e) {
Log.logException(e);
}
if (post != null && post.containsKey("deletecomplete")) {
if (post.get("deleteCache", "").equals("on")) {
Cache.clear();
}
if (post.get("deleteRobots", "").equals("on")) {
sb.robots.clear();
}
}
prop.put("HTCachePath", env.getConfig(SwitchboardConstants.HTCACHE_PATH, SwitchboardConstants.HTCACHE_PATH_DEFAULT));

@@ -39,8 +39,14 @@
<form action="IndexControlRWIs_p.html" method="post" enctype="multipart/form-data">
<fieldset><legend>Cleanup</legend>
<dl>
<dt class="TableCellDark"><input type="checkbox" name="confirmDelete"/> Index Deletion<br />(deletes all words and all urls)</dt>
<dd><input type="submit" name="deletecomplete" value="Delete index"/>
<dt class="TableCellDark">Index Deletion</dt>
<dd><input type="checkbox" name="deleteIndex" id="deleteIndex"
onclick="x=document.getElementById('deleteIndex').checked;document.getElementById('deleteCache').checked=x;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteCrawlQueues').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteCache').disabled=c;document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteCrawlQueues').disabled=c;"
/>Delete Search Index<br/>
<input type="checkbox" name="deleteCrawlQueues" id="deleteCrawlQueues" disabled="disabled" />Stop Crawler and delete Crawl Queues<br/>
<input type="checkbox" name="deleteCache" id="deleteCache" disabled="disabled" />Delete HTTP &amp; FTP Cache<br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" />Delete robots.txt Cache<br/><br/><br/>
<input type="submit" name="deletecomplete" value="Delete"/>
</dd>
</dl>
</fieldset>

@@ -55,6 +55,7 @@ import net.yacy.repository.Blacklist;
import de.anomic.crawler.CrawlProfile;
import de.anomic.data.listManager;
import de.anomic.http.client.Cache;
import de.anomic.search.QueryParams;
import de.anomic.search.RankingProcess;
import de.anomic.search.ReferenceOrder;
@@ -145,11 +146,20 @@ public class IndexControlRWIs_p {
}
// delete everything
if (post.containsKey("deletecomplete") && post.containsKey("confirmDelete")) {
segment.clear();
sb.crawlQueues.clear();
sb.crawlStacker.clear();
sb.robots.clear();
if (post.containsKey("deletecomplete")) {
if (post.get("deleteIndex", "").equals("on")) {
segment.clear();
}
if (post.get("deleteCrawlQueues", "").equals("on")) {
sb.crawlQueues.clear();
sb.crawlStacker.clear();
}
if (post.get("deleteCache", "").equals("on")) {
Cache.clear();
}
if (post.get("deleteRobots", "").equals("on")) {
sb.robots.clear();
}
post.remove("deletecomplete");
}
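
The rewritten handler clears each subsystem only when its checkbox was submitted: deleteIndex clears the index segment, deleteCrawlQueues stops the crawler and clears its queues, deleteCache wipes the HTCache, and deleteRobots empties the robots.txt database. Purely as an illustration of that one-option-one-action structure (not part of the commit; the YaCy objects are replaced by print statements), the dispatch could also be written as a lookup table:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class DeletionDispatchSketch {
    public static void main(final String[] args) {
        // checkbox name -> cleanup action; the real servlet calls segment.clear(),
        // sb.crawlQueues.clear() + sb.crawlStacker.clear(), Cache.clear() and sb.robots.clear()
        final Map<String, Runnable> actions = new LinkedHashMap<String, Runnable>();
        actions.put("deleteIndex",       () -> System.out.println("clear the search index"));
        actions.put("deleteCrawlQueues", () -> System.out.println("stop crawler, clear crawl queues"));
        actions.put("deleteCache",       () -> System.out.println("clear the HTTP & FTP cache"));
        actions.put("deleteRobots",      () -> System.out.println("clear the robots.txt cache"));

        final Map<String, String> post = new HashMap<String, String>();
        post.put("deletecomplete", "Delete");
        post.put("deleteIndex", "on"); // only the index box was ticked
        if (post.containsKey("deletecomplete")) {
            for (final Map.Entry<String, Runnable> entry : actions.entrySet()) {
                if ("on".equals(post.getOrDefault(entry.getKey(), ""))) entry.getValue().run();
            }
        }
    }
}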

@@ -91,6 +91,23 @@ public final class Cache {
Log.logException(e);
}
}
/**
* clear the cache
*/
public static void clear() {
responseHeaderDB.clear();
try {
fileDB.clear();
} catch (IOException e) {
Log.logException(e);
}
try {
fileDBunbuffered.clear();
} catch (IOException e) {
Log.logException(e);
}
}
/**
* This method changes the HTCache size.<br>
