remove obsolete searchfl work table

The table was used to register URLs whose snippets did not contain all of the query words, but it is never read anywhere, so the table and the code that fed it can be removed.
Branch: pull/8/head
reger committed 10 years ago · parent d0aff91f23 · commit 49b79987c9
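Since the commit only deletes code, a peer upgraded in place keeps its on-disk `searchfl` work table. A minimal one-off cleanup sketch for such a peer, assuming a reachable `Switchboard` instance; the helper class is hypothetical and not part of this commit, and it spells out the table name because the `TABLE_SEARCH_FAILURE_NAME` constant is removed below:

import net.yacy.search.Switchboard;

/** Hypothetical helper, not part of this commit: drop leftover rows of the
 *  obsolete "searchfl" work table on a peer that was upgraded in place. */
public final class SearchFlCleanup {

    public static void dropLegacyTable(final Switchboard sb) {
        // same Tables.clear call the removed deleteSearchFl handlers issued;
        // the table name is written out because TABLE_SEARCH_FAILURE_NAME is gone
        sb.tables.clear("searchfl");
    }
}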

ConfigHTCache_p.html
@@ -33,10 +33,10 @@
 <dl>
 <dt class="TableCellDark">Cache Deletion</dt>
 <dd><input type="checkbox" name="deleteCache" id="deleteCache"
-onclick="x=document.getElementById('deleteCache').checked;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteSearchFl').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteSearchFl').disabled=c;"
+onclick="x=document.getElementById('deleteCache').checked;document.getElementById('deleteRobots').checked=x;c='disabled';if(x){c='';}document.getElementById('deleteRobots').disabled=c;"
 />Delete HTTP &amp; FTP Cache<br/>
 <input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" />Delete robots.txt Cache<br/>
-<input type="checkbox" name="deleteSearchFl" id="deleteSearchFl" disabled="disabled" />Delete cached snippet-fetching failures during search<br/><br/><br/>
+<br/><br/>
 <input type="submit" name="deletecomplete" value="Delete" class="btn btn-danger"/>
 </dd>
 </dl>

ConfigHTCache_p.java
@@ -71,9 +71,6 @@ public class ConfigHTCache_p {
if ("on".equals(post.get("deleteRobots", ""))) {
sb.robots.clear();
}
if ("on".equals(post.get("deleteSearchFl", ""))) {
sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
}
}
prop.put("HTCachePath", env.getConfig(SwitchboardConstants.HTCACHE_PATH, SwitchboardConstants.HTCACHE_PATH_DEFAULT));

IndexControlURLs_p.html
@@ -98,7 +98,7 @@ function updatepage(str) {
 <dl>
 <dt class="TableCellDark">Index Deletion</dt>
 <dd><input type="checkbox" name="deleteIndex" id="deleteIndex"
-onclick="x=document.getElementById('deleteIndex').checked;#(rwi)#::document.getElementById('deleteRWI').checked=x;#(/rwi)#document.getElementById('deleteRobots').checked=x;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteCrawlQueues').checked=x;c='disabled';document.getElementById('deleteSearchFl').checked=x;if(x){c='';};document.getElementById('deletecomplete').disabled=c;document.getElementById('deleteCache').disabled=c;document.getElementById('deleteFirstSeen').disabled=c;document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteCrawlQueues').disabled=c;document.getElementById('deleteSearchFl').disabled=c;"
+onclick="x=document.getElementById('deleteIndex').checked;#(rwi)#::document.getElementById('deleteRWI').checked=x;#(/rwi)#document.getElementById('deleteRobots').checked=x;document.getElementById('deleteRobots').checked=x;document.getElementById('deleteCrawlQueues').checked=x;c='disabled';if(x){c='';};document.getElementById('deletecomplete').disabled=c;document.getElementById('deleteCache').disabled=c;document.getElementById('deleteFirstSeen').disabled=c;document.getElementById('deleteRobots').disabled=c;document.getElementById('deleteCrawlQueues').disabled=c;"
 /> Delete local search index (embedded Solr and old Metadata)<br/>
 #(cleanupsolr)#::<input type="checkbox" name="deleteRemoteSolr" id="deleteRemoteSolr" onclick="x=document.getElementById('deleteRemoteSolr').checked;c='disabled';if(x){c='';};document.getElementById('deletecomplete').disabled=c;" /> Delete remote solr index<br/>#(/cleanupsolr)#
 #(cleanuprwi)#::<input type="checkbox" name="deleteRWI" id="deleteRWI" onclick="x=document.getElementById('deleteRWI').checked;c='disabled';if(x){c='';};document.getElementById('deletecomplete').disabled=c;" /> Delete RWI Index (DHT transmission words)<br/>#(/cleanuprwi)#
@@ -107,7 +107,6 @@ function updatepage(str) {
<input type="checkbox" name="deleteCache" id="deleteCache" disabled="disabled" /> Delete HTTP &amp; FTP Cache<br/>
<input type="checkbox" name="deleteCrawlQueues" id="deleteCrawlQueues" disabled="disabled" /> Stop Crawler and delete Crawl Queues<br/>
<input type="checkbox" name="deleteRobots" id="deleteRobots" disabled="disabled" /> Delete robots.txt Cache<br/>
<input type="checkbox" name="deleteSearchFl" id="deleteSearchFl" disabled="disabled" /> Delete cached snippet-fetching failures during search<br/><br/>
<input type="submit" name="deletecomplete" id="deletecomplete" value="Delete" disabled="disabled" class="btn btn-danger" style="width:240px;" onclick="return confirm('Confirm Deletion')"/>
</dd>
</dl>

IndexControlURLs_p.java
@@ -174,9 +174,6 @@ public class IndexControlURLs_p {
         if ( post.get("deleteRobots", "").equals("on") ) {
             try {sb.robots.clear();} catch (final IOException e) {}
         }
-        if ( post.get("deleteSearchFl", "").equals("on") ) {
-            sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
-        }
         post.remove("deletecomplete");
     }
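Both servlet hunks follow the same form convention: a checked box arrives as the literal string "on", so every deletion action is guarded by an equality test against a defaulted `post.get`. A condensed sketch of the branches that remain after this commit (only calls visible in this diff; servlet boilerplate, imports and the other checkboxes are omitted):

// sketch only: "post" is the servlet's parameter container, "sb" the Switchboard
if ("on".equals(post.get("deleteRobots", ""))) {
    // clear the robots.txt cache; IndexControlURLs_p additionally guards
    // this call against an IOException
    try { sb.robots.clear(); } catch (final IOException e) {}
}
// the deleteSearchFl branch that cleared the "searchfl" table is gone
post.remove("deletecomplete");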

WorkTables.java
@@ -87,12 +87,6 @@ public class WorkTables extends Tables {
     public final static String TABLE_ACTIVECRAWLS_NAME = "crawljobsActive";
     public final static String TABLE_PASSIVECRAWLS_NAME = "crawljobsPassive";
-    public final static String TABLE_SEARCH_FAILURE_NAME = "searchfl";
-    public final static String TABLE_SEARCH_FAILURE_COL_URL = "url";
-    public final static String TABLE_SEARCH_FAILURE_COL_DATE = "date";
-    public final static String TABLE_SEARCH_FAILURE_COL_WORDS = "words";
-    public final static String TABLE_SEARCH_FAILURE_COL_COMMENT = "comment";
     public YMarkTables bookmarks;
     public WorkTables(final File workPath) {
@@ -353,52 +347,6 @@ public class WorkTables extends Tables {
                 indexCell.removeDelayed(word, url.hash());
             }
         }
-        // insert information about changed url into database
-        try {
-            // create and insert new entry
-            Data data = new Data();
-            byte[] date = UTF8.getBytes(GenericFormatter.SHORT_MILSEC_FORMATTER.format());
-            data.put(TABLE_SEARCH_FAILURE_COL_URL, url.toNormalform(true));
-            data.put(TABLE_SEARCH_FAILURE_COL_DATE, date);
-            data.put(TABLE_SEARCH_FAILURE_COL_WORDS, queryHashes.export());
-            data.put(TABLE_SEARCH_FAILURE_COL_COMMENT, UTF8.getBytes(reason));
-            super.insert(TABLE_SEARCH_FAILURE_NAME, url.hash(), data);
-        } catch (final IOException e) {
-            ConcurrentLog.logException(e);
-        }
     }
-    public boolean failURLsContains(byte[] urlhash) {
-        try {
-            return super.has(TABLE_SEARCH_FAILURE_NAME, urlhash);
-        } catch (final IOException e) {
-            ConcurrentLog.logException(e);
-            return false;
-        }
-    }
-    /**
-     * cleanup cached failed searchs older then timeout
-     */
-    public void cleanFailURLS(long timeout) {
-        if (timeout >= 0) {
-            try {
-                Row row;
-                Date date;
-                Iterator<Row> iter = this.iterator(WorkTables.TABLE_SEARCH_FAILURE_NAME);
-                while (iter.hasNext()) {
-                    row = iter.next();
-                    date = new Date();
-                    date = row.get(TABLE_SEARCH_FAILURE_COL_DATE, date);
-                    if(date.before(new Date(System.currentTimeMillis() - timeout))) {
-                        this.delete(TABLE_SEARCH_FAILURE_NAME, row.getPK());
-                    }
-                }
-            } catch (final IOException e) {
-                ConcurrentLog.logException(e);
-            }
-        }
-    }
     public static Map<byte[], String> commentCache(Switchboard sb) {
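The WorkTables hunks remove the table's schema constants, the write into the table at the end of failURLsRegisterMissingWord, the failURLsContains lookup, and the timeout-driven cleanFailURLS sweep. If a peer ever wanted to keep a per-session memory of snippet-fetch failures without resurrecting the persistent table, a small bounded in-memory map would be enough; a purely illustrative sketch, not part of this commit and with all names hypothetical:

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/** Illustrative only: a bounded, in-memory stand-in for the removed
 *  "searchfl" table, keyed by URL hash. Being size-bounded, it needs no
 *  scheduled cleanup job like the removed cleanFailURLS(). */
public final class FailedSnippetCache {

    private static final int MAX_ENTRIES = 10000;

    // access-ordered LinkedHashMap: the eldest entry is evicted once the
    // bound is exceeded; wrapped for thread-safe use
    private final Map<String, String> reasons = Collections.synchronizedMap(
            new LinkedHashMap<String, String>(16, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry(final Map.Entry<String, String> eldest) {
                    return size() > MAX_ENTRIES;
                }
            });

    public void registerFailure(final byte[] urlhash, final String reason) {
        this.reasons.put(new String(urlhash, StandardCharsets.US_ASCII), reason);
    }

    public boolean contains(final byte[] urlhash) {
        return this.reasons.containsKey(new String(urlhash, StandardCharsets.US_ASCII));
    }
}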

ResourceObserver.java
@@ -115,11 +115,6 @@ public class ResourceObserver {
         // delete old releases
         //if (getNormalizedDiskFree() == Space.AMPLE && getNormalizedDiskUsed(false) == Space.AMPLE) break;
-        // delete fetched snippets
-        log.info("DISK SPACE EXHAUSTED - deleting snippet cache");
-        sb.tables.clear(WorkTables.TABLE_SEARCH_FAILURE_NAME);
-        if (getNormalizedDiskFree() == Space.AMPLE && getNormalizedDiskUsed(false) == Space.AMPLE) break;
         // clear HTCACHE
         log.info("DISK SPACE EXHAUSTED - deleting HTCACHE");
         Cache.clear();
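The ResourceObserver hunk shows the escalation pattern behind this cleanup chain: run the cheapest deletion first, re-check the disk, and only move on to the next, more expensive step if space is still exhausted; dropping the snippet-cache step simply shortens the chain. A generic sketch of that loop, reusing the check visible in the hunk (the list of cleanup steps is hypothetical):

// sketch of the escalation pattern: cheapest cleanup first, re-check, escalate
for (final Runnable cleanup : cleanupStepsInAscendingCost) {
    cleanup.run();
    // stop escalating as soon as both free and used space are acceptable again
    if (getNormalizedDiskFree() == Space.AMPLE
            && getNormalizedDiskUsed(false) == Space.AMPLE) break;
}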

Switchboard.java
@@ -2409,12 +2409,6 @@ public final class Switchboard extends serverSwitch {
         // after all clean up is done, check the resource usage
         this.observer.resourceObserverJob();
-        // cleanup cached search failures
-        if ( getConfigBool(SwitchboardConstants.NETWORK_SEARCHVERIFY, false)
-             && this.peers.mySeed().getFlagAcceptRemoteIndex() ) {
-            this.tables.cleanFailURLS(getConfigLong("cleanup.failedSearchURLtimeout", -1));
-        }
         // clean up profiles
         checkInterruption();
