diff --git a/.classpath b/.classpath
index fe06ef8b2..e8815cff4 100644
--- a/.classpath
+++ b/.classpath
@@ -1,18 +1,7 @@
-
-
-
-
-
-
-
-
-
-
-
diff --git a/htroot/api/blacklists/add_entry_p.java b/source/net/yacy/htroot/api/blacklists/add_entry_p.java
similarity index 97%
rename from htroot/api/blacklists/add_entry_p.java
rename to source/net/yacy/htroot/api/blacklists/add_entry_p.java
index 1ab0c8d40..a2653235f 100644
--- a/htroot/api/blacklists/add_entry_p.java
+++ b/source/net/yacy/htroot/api/blacklists/add_entry_p.java
@@ -1,3 +1,5 @@
+package net.yacy.htroot.api.blacklists;
+
import java.util.Locale;
import net.yacy.cora.protocol.RequestHeader;
@@ -30,7 +32,7 @@ public class add_entry_p {
"add_entry_p." + header.fileType().toString().toLowerCase(Locale.ROOT),
WorkTables.TABLE_API_TYPE_CONFIGURATION,
"add to blacklist '" + blacklistToUse + "': " + entry);
-
+
if (BlacklistHelper.addBlacklistEntry(blacklistToUse, entry)) {
prop.put(XML_ITEM_STATUS, RESULT_SUCCESS);
@@ -39,7 +41,7 @@ public class add_entry_p {
} else {
prop.put(XML_ITEM_STATUS, RESULT_FAILURE);
}
-
+
} else {
prop.put(XML_ITEM_STATUS, RESULT_FAILURE);
}
diff --git a/htroot/api/blacklists/delete_entry_p.java b/source/net/yacy/htroot/api/blacklists/delete_entry_p.java
similarity index 97%
rename from htroot/api/blacklists/delete_entry_p.java
rename to source/net/yacy/htroot/api/blacklists/delete_entry_p.java
index 9afa3b4f2..efdf1badc 100644
--- a/htroot/api/blacklists/delete_entry_p.java
+++ b/source/net/yacy/htroot/api/blacklists/delete_entry_p.java
@@ -1,3 +1,5 @@
+package net.yacy.htroot.api.blacklists;
+
import java.util.Locale;
import net.yacy.cora.protocol.RequestHeader;
@@ -40,7 +42,7 @@ public class delete_entry_p {
} else {
prop.put(XML_ITEM_STATUS, RESULT_FAILURE);
}
-
+
} else {
prop.put(XML_ITEM_STATUS, RESULT_FAILURE);
}
diff --git a/htroot/api/blacklists/get_list_p.java b/source/net/yacy/htroot/api/blacklists/get_list_p.java
similarity index 94%
rename from htroot/api/blacklists/get_list_p.java
rename to source/net/yacy/htroot/api/blacklists/get_list_p.java
index c250956db..43ed79730 100644
--- a/htroot/api/blacklists/get_list_p.java
+++ b/source/net/yacy/htroot/api/blacklists/get_list_p.java
@@ -1,3 +1,5 @@
+package net.yacy.htroot.api.blacklists;
+
import java.io.File;
import java.util.Collection;
@@ -30,7 +32,7 @@ public class get_list_p {
private static final int lastTypeIndex = BLACKLIST_TYPE_VALUES.length - 1;
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, @SuppressWarnings("unused") final serverSwitch env) {
-
+
final serverObjects prop = new serverObjects();
final Collection<String> dirlist = FileUtils.getDirListing(ListManager.listsPath, Blacklist.BLACKLIST_FILENAME_FILTER);
@@ -40,23 +42,23 @@ public class get_list_p {
if (dirlist != null) {
for (final String element : dirlist) {
if (element.equals(blackListName)) {
-
+
prop.putXML(NAME, element);
prop.put(SHARED, ListManager.listSetContains(BLACK_LISTS_SHARED, element));
-
+
int j = 0;
for (final BlacklistType type : BLACKLIST_TYPE_VALUES) {
prop.putXML(PREFIX_TYPES + j + POSTFIX_NAME, type.toString());
prop.put(PREFIX_TYPES + j + POSTFIX_VALUE,
ListManager.listSetContains(type + TYPES_EXT, element));
-
+
prop.put(PREFIX_TYPES + j + POSTFIX_COMMA, j < lastTypeIndex);
-
+
j++;
}
prop.put(TYPES, BlacklistType.values().length);
-
+
prop.putXML(NAME, element);
final Collection<String> list = FileUtils.getListArray(new File(ListManager.listsPath, element));
@@ -68,18 +70,18 @@ public class get_list_p {
if (entry.charAt(0) == '#') continue;
prop.putXML(PREFIX_ITEMS + count + POSTFIX_ITEM, entry);
-
+
prop.put(PREFIX_ITEMS + count + POSTFIX_COMMA, count < lastItemCount);
-
+
count++;
}
prop.put(ITEMS, count);
}
}
}
-
-
+
+
return prop;
}
-
+
}
diff --git a/htroot/api/blacklists/get_metadata_p.java b/source/net/yacy/htroot/api/blacklists/get_metadata_p.java
similarity index 98%
rename from htroot/api/blacklists/get_metadata_p.java
rename to source/net/yacy/htroot/api/blacklists/get_metadata_p.java
index 59c8fe1f0..5cc23d3fa 100644
--- a/htroot/api/blacklists/get_metadata_p.java
+++ b/source/net/yacy/htroot/api/blacklists/get_metadata_p.java
@@ -1,3 +1,5 @@
+package net.yacy.htroot.api.blacklists;
+
import java.util.Collection;
import net.yacy.cora.protocol.RequestHeader;
@@ -46,13 +48,13 @@ public class get_metadata_p {
ListManager.listSetContains(type + TYPES_EXT, element));
prop.put(PREFIX_LISTS + blacklistCount + INFIX_TYPES + j + POSTFIX_COMMA, j < lastTypeIndex);
-
+
j++;
}
prop.put(PREFIX_LISTS + blacklistCount + POSTFIX_TYPES, BLACKLIST_TYPE_VALUES.length);
prop.put(PREFIX_LISTS + blacklistCount + POSTFIX_COMMA, blacklistCount < lastBlacklistCount);
-
+
blacklistCount++;
}
}
diff --git a/htroot/api/blacklists.java b/source/net/yacy/htroot/api/blacklists_.java
similarity index 97%
rename from htroot/api/blacklists.java
rename to source/net/yacy/htroot/api/blacklists_.java
index 75232cd02..a1f434869 100644
--- a/htroot/api/blacklists.java
+++ b/source/net/yacy/htroot/api/blacklists_.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api;
+
import java.io.File;
import java.util.List;
@@ -8,7 +10,7 @@ import net.yacy.kelondro.util.FileUtils;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
-public class blacklists {
+public class blacklists_ {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, @SuppressWarnings("unused") final serverSwitch env) {
final serverObjects prop = new serverObjects();
diff --git a/htroot/api/blacklists_p.java b/source/net/yacy/htroot/api/blacklists_p.java
similarity index 98%
rename from htroot/api/blacklists_p.java
rename to source/net/yacy/htroot/api/blacklists_p.java
index 07910ebee..453b419f3 100644
--- a/htroot/api/blacklists_p.java
+++ b/source/net/yacy/htroot/api/blacklists_p.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api;
+
import java.io.File;
import java.util.List;
diff --git a/htroot/api/bookmarks/get_bookmarks.java b/source/net/yacy/htroot/api/bookmarks/get_bookmarks.java
similarity index 98%
rename from htroot/api/bookmarks/get_bookmarks.java
rename to source/net/yacy/htroot/api/bookmarks/get_bookmarks.java
index e3d6d20f4..fc52cbc96 100644
--- a/htroot/api/bookmarks/get_bookmarks.java
+++ b/source/net/yacy/htroot/api/bookmarks/get_bookmarks.java
@@ -1,5 +1,7 @@
+package net.yacy.htroot.api.bookmarks;
+
import java.util.Date;
import java.util.Iterator;
@@ -155,8 +157,8 @@ public class get_bookmarks {
}
count = 0;
while (count < itemsPerPage && it.hasNext()) {
-
- BookmarksDB.Bookmark bookmark = sb.bookmarksDB.getBookmark(it.next());
+
+ final BookmarksDB.Bookmark bookmark = sb.bookmarksDB.getBookmark(it.next());
if (bookmark != null) {
prop.put("display_bookmarks_"+count+"_id",count);
prop.put("display_bookmarks_"+count+"_link",bookmark.getUrl());
@@ -250,7 +252,7 @@ public class get_bookmarks {
private static int print_XBEL(final Iterator bit, int count) {
Date date;
while(bit.hasNext()){
- BookmarksDB.Bookmark bookmark = sb.bookmarksDB.getBookmark(bit.next());
+ final BookmarksDB.Bookmark bookmark = sb.bookmarksDB.getBookmark(bit.next());
if (bookmark != null) {
date = new Date(bookmark.getTimeStamp());
prop.put("display_xbel_"+count+"_elements", " bookmark_hashes = switchboard.bookmarksDB.getDate(Long.toString(parsedDate.getTime())).getBookmarkList();
for (final String bookmark_hash : bookmark_hashes){
- Bookmark bookmark = switchboard.bookmarksDB.getBookmark(bookmark_hash);
+ final Bookmark bookmark = switchboard.bookmarksDB.getBookmark(bookmark_hash);
if (bookmark != null) {
if (ISO8601Formatter.FORMATTER.format(new Date(bookmark.getTimeStamp())).equals(date) &&
tag==null || bookmark.getTags().contains(tag) &&
diff --git a/htroot/api/bookmarks/tags/addTag_p.java b/source/net/yacy/htroot/api/bookmarks/tags/addTag_p.java
similarity index 90%
rename from htroot/api/bookmarks/tags/addTag_p.java
rename to source/net/yacy/htroot/api/bookmarks/tags/addTag_p.java
index 25ce4459d..6a259f0bc 100644
--- a/htroot/api/bookmarks/tags/addTag_p.java
+++ b/source/net/yacy/htroot/api/bookmarks/tags/addTag_p.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api.bookmarks.tags;
+
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.data.BookmarksDB.Bookmark;
import net.yacy.search.Switchboard;
@@ -21,7 +23,7 @@ public class addTag_p {
switchboard.bookmarksDB.addTag(post.get("selectTag"), post.get("addTag"));
prop.put("result", "1");//success
} else if (post.containsKey("urlhash") && post.containsKey("addTag")) {
- Bookmark bm = switchboard.bookmarksDB.getBookmark(post.get("urlhash"));
+ final Bookmark bm = switchboard.bookmarksDB.getBookmark(post.get("urlhash"));
if (bm != null) {
bm.addTag(post.get("addTag"));
prop.put("result", "1");//success
diff --git a/htroot/api/bookmarks/tags/editTag_p.java b/source/net/yacy/htroot/api/bookmarks/tags/editTag_p.java
similarity index 95%
rename from htroot/api/bookmarks/tags/editTag_p.java
rename to source/net/yacy/htroot/api/bookmarks/tags/editTag_p.java
index e2507dd13..6492ffddd 100644
--- a/htroot/api/bookmarks/tags/editTag_p.java
+++ b/source/net/yacy/htroot/api/bookmarks/tags/editTag_p.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api.bookmarks.tags;
+
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
diff --git a/htroot/api/bookmarks/tags/getTag.java b/source/net/yacy/htroot/api/bookmarks/tags/getTag.java
similarity index 98%
rename from htroot/api/bookmarks/tags/getTag.java
rename to source/net/yacy/htroot/api/bookmarks/tags/getTag.java
index 6f26d0ecc..2ba3e9d7b 100644
--- a/htroot/api/bookmarks/tags/getTag.java
+++ b/source/net/yacy/htroot/api/bookmarks/tags/getTag.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api.bookmarks.tags;
+
import java.util.Iterator;
import net.yacy.cora.protocol.RequestHeader;
diff --git a/htroot/api/bookmarks/xbel/xbel.java b/source/net/yacy/htroot/api/bookmarks/xbel/xbel.java
similarity index 97%
rename from htroot/api/bookmarks/xbel/xbel.java
rename to source/net/yacy/htroot/api/bookmarks/xbel/xbel.java
index 4b779a4fb..4bd6e45b4 100644
--- a/htroot/api/bookmarks/xbel/xbel.java
+++ b/source/net/yacy/htroot/api/bookmarks/xbel/xbel.java
@@ -1,5 +1,7 @@
+package net.yacy.htroot.api.bookmarks.xbel;
+
import java.util.Date;
import java.util.Iterator;
@@ -107,7 +109,7 @@ public class xbel {
private static int print_XBEL(final Iterator bit, int count) {
Date date;
while(bit.hasNext()){
- BookmarksDB.Bookmark bookmark=switchboard.bookmarksDB.getBookmark(bit.next());
+ final BookmarksDB.Bookmark bookmark=switchboard.bookmarksDB.getBookmark(bit.next());
if (bookmark != null) {
date=new Date(bookmark.getTimeStamp());
prop.put("xbel_"+count+"_elements", ".
*/
+package net.yacy.htroot.api;
import java.io.IOException;
import java.net.MalformedURLException;
@@ -96,7 +97,7 @@ public class citation {
u = sb.getURL(ASCII.getBytes(hash));
}
if (u != null) uri = new DigestURL(u);
- } catch (IOException e) {
+ } catch (final IOException e) {
ConcurrentLog.logException(e);
}
}
@@ -112,13 +113,14 @@ public class citation {
return prop;
}
@SuppressWarnings("unchecked")
+ final
ArrayList<String> title = (ArrayList<String>) doc.getFieldValue(CollectionSchema.title.getSolrFieldName());
- String text = (String) doc.getFieldValue(CollectionSchema.text_t.getSolrFieldName());
+ final String text = (String) doc.getFieldValue(CollectionSchema.text_t.getSolrFieldName());
- ArrayList<String> sentences = new ArrayList<String>();
- if (title != null) for (String s: title) if (s.length() > 0) sentences.add(s);
+ final ArrayList<String> sentences = new ArrayList<String>();
+ if (title != null) for (final String s: title) if (s.length() > 0) sentences.add(s);
if (text != null && !text.isEmpty()) {
- SentenceReader sr = new SentenceReader(text);
+ final SentenceReader sr = new SentenceReader(text);
StringBuilder line;
while (sr.hasNext()) {
line = sr.next();
@@ -127,8 +129,8 @@ public class citation {
}
// for each line make a statistic about the number of occurrences somewhere else
- OrderedScoreMap<String> scores = new OrderedScoreMap<String>(null); // accumulates scores for citating urls
- LinkedHashMap<String, Set<DigestURL>> sentenceOcc = new LinkedHashMap<String, Set<DigestURL>>();
+ final OrderedScoreMap<String> scores = new OrderedScoreMap<String>(null); // accumulates scores for citating urls
+ final LinkedHashMap<String, Set<DigestURL>> sentenceOcc = new LinkedHashMap<String, Set<DigestURL>>();
for (String sentence: sentences) {
if (sentence == null || sentence.length() < 40) {
// do not count the very short sentences
@@ -137,12 +139,12 @@ public class citation {
}
try {
sentence = sentence.replace('"', '\'');
- SolrDocumentList doclist = connector.getDocumentListByQuery("text_t:\"" + sentence + "\"", CollectionSchema.url_chars_i.getSolrFieldName() + " asc", 0, 100, CollectionSchema.sku.getSolrFieldName());
- int count = (int) doclist.getNumFound();
+ final SolrDocumentList doclist = connector.getDocumentListByQuery("text_t:\"" + sentence + "\"", CollectionSchema.url_chars_i.getSolrFieldName() + " asc", 0, 100, CollectionSchema.sku.getSolrFieldName());
+ final int count = (int) doclist.getNumFound();
if (count > 0) {
- Set<DigestURL> list = new TreeSet<DigestURL>();
- for (SolrDocument d: doclist) {
- String u = (String) d.getFieldValue(CollectionSchema.sku.getSolrFieldName());
+ final Set<DigestURL> list = new TreeSet<DigestURL>();
+ for (final SolrDocument d: doclist) {
+ final String u = (String) d.getFieldValue(CollectionSchema.sku.getSolrFieldName());
if (u == null || u.equals(url)) continue;
scores.inc(u);
try {list.add(new DigestURL(u, null));} catch (final MalformedURLException e) {}
@@ -158,14 +160,14 @@ public class citation {
// iterate the sentences
int i = 0;
int sentenceNr = 0;
- for (Map.Entry<String, Set<DigestURL>> se: sentenceOcc.entrySet()) {
- Set<DigestURL> app = se.getValue();
+ for (final Map.Entry<String, Set<DigestURL>> se: sentenceOcc.entrySet()) {
+ final Set<DigestURL> app = se.getValue();
if (filter) { // prepare list, only include sentence with citation
if (app != null && app.size() > 0) {
- StringBuilder dd = new StringBuilder(se.getKey());
+ final StringBuilder dd = new StringBuilder(se.getKey());
prop.put("sentences_" + i + "_dt", sentenceNr);
dd.append("
appears in:");
- for (DigestURL u : app) {
+ for (final DigestURL u : app) {
if (u != null) {
dd.append(" ").append(u.getHost()).append("");
}
@@ -174,11 +176,11 @@ public class citation {
i++;
}
} else { // prepare list, include all sentences
- StringBuilder dd = new StringBuilder(se.getKey());
+ final StringBuilder dd = new StringBuilder(se.getKey());
prop.put("sentences_" + i + "_dt", sentenceNr);
if (app != null && app.size() > 0) {
dd.append("
appears in:");
- for (DigestURL u : app) {
+ for (final DigestURL u : app) {
if (u != null) {
dd.append(" ").append(u.getHost()).append("");
}
@@ -193,14 +195,14 @@ public class citation {
// iterate the citations in order of number of citations
i = 0;
- for (String u: scores.keyList(false)) {
+ for (final String u: scores.keyList(false)) {
try {
- DigestURL uu = new DigestURL(u, null);
+ final DigestURL uu = new DigestURL(u, null);
prop.put("citations_" + i + "_dt", "" + u + "");
- StringBuilder dd = new StringBuilder();
+ final StringBuilder dd = new StringBuilder();
dd.append("makes ").append(Integer.toString(scores.get(u))).append(" citations: of ").append(url);
- for (Map.Entry<String, Set<DigestURL>> se: sentenceOcc.entrySet()) {
- Set<DigestURL> occurls = se.getValue();
+ for (final Map.Entry<String, Set<DigestURL>> se: sentenceOcc.entrySet()) {
+ final Set<DigestURL> occurls = se.getValue();
if (occurls != null && occurls.contains(uu)) dd.append("
").append(se.getKey()).append("");
}
prop.put("citations_" + i + "_dd", dd.toString());
@@ -211,10 +213,10 @@ public class citation {
// find similar documents from different hosts
i = 0;
- for (String u: scores.keyList(false)) {
+ for (final String u: scores.keyList(false)) {
if (scores.get(u) < ch) continue;
try {
- DigestURL uu = new DigestURL(u, null);
+ final DigestURL uu = new DigestURL(u, null);
if (uu.getOrganization().equals(uri.getOrganization())) continue;
prop.put("similar_links_" + i + "_url", u);
i++;
diff --git a/htroot/api/config_p.java b/source/net/yacy/htroot/api/config_p.java
similarity index 97%
rename from htroot/api/config_p.java
rename to source/net/yacy/htroot/api/config_p.java
index 146f86684..a9db74d4d 100644
--- a/htroot/api/config_p.java
+++ b/source/net/yacy/htroot/api/config_p.java
@@ -1,4 +1,6 @@
+package net.yacy.htroot.api;
+
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
diff --git a/htroot/api/feed.java b/source/net/yacy/htroot/api/feed.java
similarity index 99%
rename from htroot/api/feed.java
rename to source/net/yacy/htroot/api/feed.java
index 1c853085d..e4362e105 100644
--- a/htroot/api/feed.java
+++ b/source/net/yacy/htroot/api/feed.java
@@ -1,5 +1,7 @@
+package net.yacy.htroot.api;
+
import java.util.Date;
import java.util.List;
diff --git a/htroot/api/getpageinfo.java b/source/net/yacy/htroot/api/getpageinfo.java
similarity index 93%
rename from htroot/api/getpageinfo.java
rename to source/net/yacy/htroot/api/getpageinfo.java
index 9763adb7a..1110a1f4d 100644
--- a/htroot/api/getpageinfo.java
+++ b/source/net/yacy/htroot/api/getpageinfo.java
@@ -24,6 +24,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.util.List;
import java.util.Map.Entry;
@@ -40,7 +42,7 @@ public class getpageinfo {
@SuppressWarnings("unused")
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
-
+
/* Redirect to getpageinfo_p */
StringBuilder redirectedLocation;
if(header != null && header.getPathInfo() != null && header.getPathInfo().endsWith(".json")) {
@@ -48,20 +50,20 @@ public class getpageinfo {
} else {
redirectedLocation = new StringBuilder("getpageinfo_p.xml");
}
-
+
/* Append eventual request parameters to the redirected location */
if (post != null) {
- List<Entry<String, String>> parameters = post.entrySet();
+ final List<Entry<String, String>> parameters = post.entrySet();
if (parameters != null && !parameters.isEmpty()) {
redirectedLocation.append("?");
- for (Entry<String, String> entry : parameters) {
+ for (final Entry<String, String> entry : parameters) {
redirectedLocation.append(entry.getKey()).append("=").append(entry.getValue()).append("&");
}
/* Remove trailing "&" */
redirectedLocation.setLength(redirectedLocation.length() - 1);
}
}
-
+
prop.put(serverObjects.ACTION_LOCATION, redirectedLocation.toString());
return prop;
}
diff --git a/htroot/api/getpageinfo_p.java b/source/net/yacy/htroot/api/getpageinfo_p.java
similarity index 94%
rename from htroot/api/getpageinfo_p.java
rename to source/net/yacy/htroot/api/getpageinfo_p.java
index a94a4ae1c..68a1d7278 100644
--- a/htroot/api/getpageinfo_p.java
+++ b/source/net/yacy/htroot/api/getpageinfo_p.java
@@ -24,6 +24,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.io.IOException;
import java.net.MalformedURLException;
import java.util.ArrayList;
@@ -61,7 +63,7 @@ public class getpageinfo_p {
/**
* Scrape and parse a resource at a specified URL to provide some information, depending on the requested actions.
- *
+ *
*
* Example API calls :
*
@@ -70,8 +72,8 @@ public class getpageinfo_p {
* - Only check for an OAI Repository at CiteSeerX : http://localhost:8090/api/getpageinfo_p.xml?url=http://citeseerx.ist.psu.edu/oai2&actions=oai
*
*
- *
- *
+ *
+ *
* @param header
* servlet request header
* @param post
@@ -82,7 +84,7 @@ public class getpageinfo_p {
*
* - title : look for the resource title, description, language, icons, keywords, and links
* - robots : check if crawling the resource is allowed by the eventual robots.txt policy file, and also if this file exposes sitemap(s) URLs.
- * - oai : send an "Identify" OAI-PMH request (http://www.openarchives.org/OAI/openarchivesprotocol.html#Identify)
+ * - oai : send an "Identify" OAI-PMH request (http://www.openarchives.org/OAI/openarchivesprotocol.html#Identify)
* at the URL to check for a OAI-PMH response from an Open Archive Initiative Repository
*
*
@@ -139,14 +141,14 @@ public class getpageinfo_p {
}
net.yacy.document.Document scraper = null;
if (u != null) try {
- ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
-
+ final ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
+
if(post.containsKey("maxBytes")) {
/* A maxBytes limit is specified : let's try to parse only the amount of bytes given */
final long maxBytes = post.getLong("maxBytes", sb.loader.protocolMaxFileSize(u));
scraper = sb.loader.loadDocumentAsLimitedStream(u, CacheStrategy.IFEXIST, BlacklistType.CRAWLER, agent, maxLinks, maxBytes);
} else {
- /* No maxBytes limit : apply regular parsing with default crawler limits.
+ /* No maxBytes limit : apply regular parsing with default crawler limits.
* Eventual maxLinks limit will apply after loading and parsing the document. */
scraper = sb.loader.loadDocumentAsStream(u, CacheStrategy.IFEXIST, BlacklistType.CRAWLER, agent);
}
@@ -161,9 +163,9 @@ public class getpageinfo_p {
prop.putXML("title", scraper.dc_title());
// put the icons that belong to the document
- Set<DigestURL> iconURLs = scraper.getIcons().keySet();
+ final Set<DigestURL> iconURLs = scraper.getIcons().keySet();
long count = 0;
- for (DigestURL iconURL : iconURLs) {
+ for (final DigestURL iconURL : iconURLs) {
if(count >= maxLinks) {
break;
}
@@ -200,7 +202,7 @@ public class getpageinfo_p {
count = 0;
final Iterator<AnchorURL> urisIt = uris.iterator();
while (urisIt.hasNext()) {
- AnchorURL uri = urisIt.next();
+ final AnchorURL uri = urisIt.next();
if (uri == null) continue;
if(count >= maxLinks) {
break;
@@ -221,15 +223,15 @@ public class getpageinfo_p {
final DigestURL theURL = new DigestURL(url);
// determine if crawling of the current URL is allowed
- ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
- RobotsTxtEntry robotsEntry = sb.robots.getEntry(theURL, agent);
+ final ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
+ final RobotsTxtEntry robotsEntry = sb.robots.getEntry(theURL, agent);
prop.put("robots-allowed", robotsEntry == null ? 1 : robotsEntry.isDisallowed(theURL) ? 0 : 1);
prop.putHTML("robotsInfo", robotsEntry == null ? "" : robotsEntry.getInfo());
// get the sitemap URL(s) of the domain
final List<String> sitemaps = robotsEntry == null ? new ArrayList<String>(0) : robotsEntry.getSitemaps();
int count = 0;
- for (String sitemap : sitemaps) {
+ for (final String sitemap : sitemaps) {
if(count >= maxLinks) {
break;
}
diff --git a/htroot/api/latency_p.java b/source/net/yacy/htroot/api/latency_p.java
similarity index 88%
rename from htroot/api/latency_p.java
rename to source/net/yacy/htroot/api/latency_p.java
index 7636e6332..c799a32d5 100644
--- a/htroot/api/latency_p.java
+++ b/source/net/yacy/htroot/api/latency_p.java
@@ -21,6 +21,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.util.Date;
import java.util.Iterator;
import java.util.Map;
@@ -43,7 +45,7 @@ public class latency_p {
Map.Entry e;
int c = 0;
Latency.Host host;
- ClientIdentification.Agent agent = post == null ? ClientIdentification.yacyInternetCrawlerAgent : ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
+ final ClientIdentification.Agent agent = post == null ? ClientIdentification.yacyInternetCrawlerAgent : ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
while (i.hasNext()) {
e = i.next();
host = e.getValue();
diff --git a/htroot/api/linkstructure.java b/source/net/yacy/htroot/api/linkstructure.java
similarity index 79%
rename from htroot/api/linkstructure.java
rename to source/net/yacy/htroot/api/linkstructure.java
index 62e85f92f..e2c69517b 100644
--- a/htroot/api/linkstructure.java
+++ b/source/net/yacy/htroot/api/linkstructure.java
@@ -17,6 +17,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.io.IOException;
import java.net.MalformedURLException;
@@ -46,25 +48,25 @@ public class linkstructure {
final boolean xml = ext.equals("xml");
final Switchboard sb = (Switchboard) env;
- Fulltext fulltext = sb.index.fulltext();
+ final Fulltext fulltext = sb.index.fulltext();
if (post == null) return prop;
- boolean authenticated = sb.adminAuthenticated(header) >= 2;
- int maxtime = Math.min(post.getInt("maxtime", 60000), authenticated ? 300000 : 1000);
- int maxnodes = Math.min(post.getInt("maxnodes", 10000), authenticated ? 10000000 : 100);
- HyperlinkGraph hlg = new HyperlinkGraph();
+ final boolean authenticated = sb.adminAuthenticated(header) >= 2;
+ final int maxtime = Math.min(post.getInt("maxtime", 60000), authenticated ? 300000 : 1000);
+ final int maxnodes = Math.min(post.getInt("maxnodes", 10000), authenticated ? 10000000 : 100);
+ final HyperlinkGraph hlg = new HyperlinkGraph();
int maxdepth = 0;
if (post.get("about", null) != null) try {
// get link structure within a host
- String about = post.get("about", null); // may be a URL, a URL hash or a domain hash
+ final String about = post.get("about", null); // may be a URL, a URL hash or a domain hash
DigestURL url = null;
String hostname = null;
if (about.length() == 12 && Base64Order.enhancedCoder.wellformed(ASCII.getBytes(about))) {
- byte[] urlhash = ASCII.getBytes(about);
+ final byte[] urlhash = ASCII.getBytes(about);
try {
- String u = authenticated ? sb.getURL(urlhash) : null;
+ final String u = authenticated ? sb.getURL(urlhash) : null;
url = u == null ? null : new DigestURL(u);
- } catch (IOException e) {
+ } catch (final IOException e) {
ConcurrentLog.logException(e);
}
} else if (url == null && about.length() > 0) { // consider "about" as url or hostname
@@ -79,8 +81,8 @@ public class linkstructure {
} catch (final MalformedURLException e) {}
else if (post.get("to", null) != null) try {
// get link structure between two links
- DigestURL to = new DigestURL(post.get("to", null), null); // must be an url
- DigestURL from = post.get("from", null) == null ? null : new DigestURL(post.get("from", null)); // can be null or must be an url
+ final DigestURL to = new DigestURL(post.get("to", null), null); // must be an url
+ final DigestURL from = post.get("from", null) == null ? null : new DigestURL(post.get("from", null)); // can be null or must be an url
hlg.path(sb.index, from, to, maxtime, maxnodes);
} catch (final MalformedURLException e) {}
@@ -100,12 +102,12 @@ public class linkstructure {
private static void writeGraph(final servletProperties prop, final HyperlinkGraph hlg, final int maxdepth) {
int c = 0;
- for (HyperlinkEdge e: hlg) {
+ for (final HyperlinkEdge e: hlg) {
prop.putJSON("edges_" + c + "_source", e.source.getPath());
prop.putJSON("edges_" + c + "_target", e.target.type.equals(HyperlinkType.Outbound) ? e.target.toNormalform(true) : e.target.getPath());
prop.putJSON("edges_" + c + "_type", e.target.type.name());
- Integer depth_source = hlg.getDepth(e.source);
- Integer depth_target = hlg.getDepth(e.target);
+ final Integer depth_source = hlg.getDepth(e.source);
+ final Integer depth_target = hlg.getDepth(e.target);
prop.put("edges_" + c + "_depthSource", depth_source == null ? -1 : depth_source.intValue());
prop.put("edges_" + c + "_depthTarget", depth_target == null ? -1 : depth_target.intValue());
c++;
diff --git a/htroot/api/push_p.java b/source/net/yacy/htroot/api/push_p.java
similarity index 79%
rename from htroot/api/push_p.java
rename to source/net/yacy/htroot/api/push_p.java
index 9aca72451..657a2e15f 100644
--- a/htroot/api/push_p.java
+++ b/source/net/yacy/htroot/api/push_p.java
@@ -7,17 +7,19 @@
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
- *
+ *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see .
*/
+package net.yacy.htroot.api;
+
import java.net.MalformedURLException;
import java.util.Date;
@@ -38,15 +40,15 @@ import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
public class push_p {
-
+
// test: http://localhost:8090/api/push_p.json?count=1&synchronous=false&commit=false&url-0=http://nowhere.cc/example.txt&data-0=%22hello%20world%22&lastModified-0=Tue,%2015%20Nov%201994%2012:45:26%20GMT&contentType-0=text/plain&collection-0=testpush&responseHeader-0=
-
+
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects();
// display mode: this only helps to display a nice input form for test cases
- int c = post == null ? 1 : post.getInt("c", 0);
+ final int c = post == null ? 1 : post.getInt("c", 0);
if (c > 0) {
prop.put("mode", 0);
for (int i = 0; i < c; i++) prop.put("mode_input_" + i + "_count", i);
@@ -54,45 +56,45 @@ public class push_p {
prop.put("mode_count", c);
return prop;
}
-
+
// push mode: this does a document upload
prop.put("mode", 1);
if (post == null) return prop;
- boolean commit = post.getBoolean("commit");
- boolean synchronous = commit || post.getBoolean("synchronous");
- int count = post.getInt("count", 0);
+ final boolean commit = post.getBoolean("commit");
+ final boolean synchronous = commit || post.getBoolean("synchronous");
+ final int count = post.getInt("count", 0);
boolean successall = true;
int countsuccess = 0;
int countfail = 0;
for (int i = 0; i < count; i++) {
try {
prop.put("mode_results_" + i + "_item", i);
- String u = post.get("url-" + i, "");
+ final String u = post.get("url-" + i, "");
prop.put("mode_results_" + i + "_url", u);
- DigestURL url = new DigestURL(u);
- String collection = post.get("collection-" + i, "");
- String lastModified = post.get("lastModified-" + i, ""); // must be in RFC1123 format
- String contentType = post.get("contentType-" + i, "");
- String data64 = post.get("data-" + i + "$file", ""); // multi-file uploads are all base64-encoded in YaCyDefaultServlet.parseMultipart
+ final DigestURL url = new DigestURL(u);
+ final String collection = post.get("collection-" + i, "");
+ final String lastModified = post.get("lastModified-" + i, ""); // must be in RFC1123 format
+ final String contentType = post.get("contentType-" + i, "");
+ final String data64 = post.get("data-" + i + "$file", ""); // multi-file uploads are all base64-encoded in YaCyDefaultServlet.parseMultipart
byte[] data = Base64Order.standardCoder.decode(data64);
if ((data == null || data.length == 0) && data64.length() > 0) data = UTF8.getBytes(data64); // for test cases
-
+
// create response header
final ResponseHeader responseHeader = new ResponseHeader(200);
responseHeader.put(HeaderFramework.LAST_MODIFIED, lastModified);
responseHeader.put(HeaderFramework.CONTENT_TYPE, contentType);
responseHeader.put(HeaderFramework.CONTENT_LENGTH, Long.toString(data.length));
// add generic fields
- String[] responseHeaderMap = post.getParams("responseHeader-" + i); // strings with key-value pairs; separated by ':'
- for (String kv: responseHeaderMap) {
- int p = kv.indexOf(':');
+ final String[] responseHeaderMap = post.getParams("responseHeader-" + i); // strings with key-value pairs; separated by ':'
+ for (final String kv: responseHeaderMap) {
+ final int p = kv.indexOf(':');
if (p < 0) continue;
- String key = kv.substring(0, p).trim();
- String value = kv.substring(p + 1).trim();
+ final String key = kv.substring(0, p).trim();
+ final String value = kv.substring(p + 1).trim();
responseHeader.put(key, value);
}
- CrawlProfile profile = sb.crawler.getPushCrawlProfile(collection);
-
+ final CrawlProfile profile = sb.crawler.getPushCrawlProfile(collection);
+
// create requests and artificial response
final Request request = new Request(
ASCII.getBytes(sb.peers.mySeed().hash),
@@ -103,15 +105,15 @@ public class push_p {
profile.handle(), // the name of the prefetch profile. This must not be null!
0, // forkfactor sum of anchors of all ancestors
profile.timezoneOffset());
- Response response = new Response(
+ final Response response = new Response(
request,
null,
responseHeader,
profile,
false, // from cache?
data); // content
- IndexingQueueEntry in = new IndexingQueueEntry(response, null, null);
-
+ final IndexingQueueEntry in = new IndexingQueueEntry(response, null, null);
+
if (synchronous) {
// synchronously process the content
sb.storeDocumentIndex(sb.webStructureAnalysis(sb.condenseDocument(sb.parseDocument(in))));
@@ -123,7 +125,7 @@ public class push_p {
prop.put("mode_results_" + i + "_success_message", YaCyDefaultServlet.getContext(header, sb) + "/solr/select?q=sku:%22" + u + "%22");
countsuccess++;
- } catch (MalformedURLException e) {
+ } catch (final MalformedURLException e) {
e.printStackTrace();
prop.put("mode_results_" + i + "_success", "0");
prop.put("mode_results_" + i + "_success_message", e.getMessage());
@@ -136,10 +138,10 @@ public class push_p {
prop.put("mode_count", count);
prop.put("mode_countsuccess", countsuccess);
prop.put("mode_countfail", countfail);
-
+
if (synchronous && commit) sb.index.fulltext().commit(true);
-
+
return prop;
}
-
+
}
diff --git a/htroot/api/schema.java b/source/net/yacy/htroot/api/schema.java
similarity index 89%
rename from htroot/api/schema.java
rename to source/net/yacy/htroot/api/schema.java
index 06d5b3c3a..875896a43 100644
--- a/htroot/api/schema.java
+++ b/source/net/yacy/htroot/api/schema.java
@@ -22,6 +22,8 @@
* If not, see .
*/
+package net.yacy.htroot.api;
+
import net.yacy.cora.federate.solr.SchemaConfiguration;
import net.yacy.cora.federate.solr.SchemaDeclaration;
import net.yacy.cora.protocol.HeaderFramework;
@@ -42,13 +44,13 @@ public class schema {
final Switchboard sb = (Switchboard) env;
String schemaName = CollectionSchema.CORE_NAME;
- if (post != null) schemaName = post.get("core", schemaName);
-
+ if (post != null) schemaName = post.get("core", schemaName);
+
// write schema
int c = 0;
- SchemaConfiguration solrSchema = schemaName.equals(CollectionSchema.CORE_NAME) ? sb.index.fulltext().getDefaultConfiguration() : sb.index.fulltext().getWebgraphConfiguration();
- SchemaDeclaration[] cc = schemaName.equals(CollectionSchema.CORE_NAME) ? CollectionSchema.values() : WebgraphSchema.values();
- for (SchemaDeclaration field : cc) {
+ final SchemaConfiguration solrSchema = schemaName.equals(CollectionSchema.CORE_NAME) ? sb.index.fulltext().getDefaultConfiguration() : sb.index.fulltext().getWebgraphConfiguration();
+ final SchemaDeclaration[] cc = schemaName.equals(CollectionSchema.CORE_NAME) ? CollectionSchema.values() : WebgraphSchema.values();
+ for (final SchemaDeclaration field : cc) {
if (solrSchema.contains(field.name())) {
addField(prop, c, field);
c++;
@@ -63,13 +65,13 @@ public class schema {
c++;
}
}
-
+
//if (solrScheme.contains(YaCySchema.author)) {addField(prop, c, YaCySchema.author_sxt);}
prop.put("fields", c);
if (schemaName.equals(CollectionSchema.CORE_NAME)) {
prop.put("copyFieldAuthor", solrSchema.contains(CollectionSchema.author) ? 1 : 0);
-
+
prop.put("solruniquekey", CollectionSchema.id.getSolrFieldName());
prop.put("solrdefaultsearchfield",
solrSchema.contains(CollectionSchema.text_t) ? CollectionSchema.text_t.getSolrFieldName() :
@@ -79,7 +81,7 @@ public class schema {
);
} else {
prop.put("copyFieldAuthor", 0);
-
+
prop.put("solruniquekey", WebgraphSchema.id.getSolrFieldName());
prop.put("solrdefaultsearchfield",
solrSchema.contains(WebgraphSchema.target_linktext_s) ? WebgraphSchema.target_linktext_s.getSolrFieldName() :
@@ -87,18 +89,18 @@ public class schema {
solrSchema.contains(WebgraphSchema.target_alt_s) ? WebgraphSchema.target_alt_s.getSolrFieldName() :
WebgraphSchema.id.getSolrFieldName()
);
- }
+ }
// add CORS Access header
final ResponseHeader outgoingHeader = new ResponseHeader(200);
outgoingHeader.put(HeaderFramework.CORS_ALLOW_ORIGIN, "*");
- prop.setOutgoingHeader(outgoingHeader);
-
+ prop.setOutgoingHeader(outgoingHeader);
+
// return rewrite properties
return prop;
}
-
- private static void addField(servletProperties prop, int c, SchemaDeclaration field) {
+
+ private static void addField(final servletProperties prop, final int c, final SchemaDeclaration field) {
prop.put("fields_" + c + "_solrname", field.getSolrFieldName());
prop.put("fields_" + c + "_type", field.getType().printName());
prop.put("fields_" + c + "_comment", field.getComment());
diff --git a/htroot/api/share.java b/source/net/yacy/htroot/api/share.java
similarity index 90%
rename from htroot/api/share.java
rename to source/net/yacy/htroot/api/share.java
index 167ebe75c..c2ac5cfb5 100644
--- a/htroot/api/share.java
+++ b/source/net/yacy/htroot/api/share.java
@@ -7,19 +7,21 @@
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
- *
+ *
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
- *
+ *
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see .
*/
-import java.io.File;
+package net.yacy.htroot.api;
+
import java.io.ByteArrayInputStream;
+import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
@@ -34,7 +36,7 @@ import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
public class share {
-
+
/**
* Servlet to share any kind of binary to this peer.
* That mean you can upload 'things'. While this is the generic view,
@@ -53,12 +55,12 @@ public class share {
final serverObjects prop = new serverObjects();
// display mode: this only helps to display a nice input form for test cases
- int c = post == null ? 1 : post.getInt("c", 0);
+ final int c = post == null ? 1 : post.getInt("c", 0);
if (c > 0) {
prop.put("mode", 0);
return prop;
}
-
+
// push mode: this does a document upload
prop.put("mode", 1);
prop.put("mode_success", 0);
@@ -66,9 +68,9 @@ public class share {
prop.put("mode_countsuccess", 0);
prop.put("mode_countfail", 0);
prop.put("mode_item", "");
-
+
if (post == null) return prop;
-
+
// check file name
String filename = post.get("data", "");
if (filename.isEmpty()) {
@@ -79,9 +81,9 @@ public class share {
prop.put("mode_success_message", "no index dump file (" + Fulltext.yacy_dump_prefix + "*.xml.gz)");
return prop;
}
-
+
// check data
- String dataString = post.get("data$file", "");
+ final String dataString = post.get("data$file", "");
if (dataString.length() == 0) return prop;
byte[] data;
if (filename.endsWith(".base64")) {
@@ -91,20 +93,20 @@ public class share {
data = UTF8.getBytes(dataString);
}
if (data == null || data.length == 0) return prop;
-
+
// modify the file name; ignore and replace the used transaction token
- int ttp = filename.indexOf("_t");
+ final int ttp = filename.indexOf("_t");
if (ttp < 0) return prop;
if (filename.charAt(ttp + 3) != '.') return prop;
filename = filename.substring(0, ttp) + "_ts" + filename.substring(ttp + 3); // transaction token: 's' as 'shared'.
-
+
// process the data
- File tmpFile = new File(yacy.shareDumpDefaultPath, filename + ".tmp");
- File finalFile = new File(yacy.shareDumpDefaultPath, filename);
+ final File tmpFile = new File(yacy.shareDumpDefaultPath, filename + ".tmp");
+ final File finalFile = new File(yacy.shareDumpDefaultPath, filename);
try {
Files.copy(new ByteArrayInputStream(data), tmpFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
tmpFile.renameTo(finalFile);
- } catch (IOException e) {
+ } catch (final IOException e) {
ConcurrentLog.logException(e);
return prop;
}
@@ -112,5 +114,5 @@ public class share {
prop.put("mode_success", 1);
return prop;
}
-
+
}
diff --git a/htroot/api/snapshot.java b/source/net/yacy/htroot/api/snapshot.java
similarity index 78%
rename from htroot/api/snapshot.java
rename to source/net/yacy/htroot/api/snapshot.java
index a47475810..3475052df 100644
--- a/htroot/api/snapshot.java
+++ b/source/net/yacy/htroot/api/snapshot.java
@@ -18,6 +18,8 @@
* If not, see .
*/
+package net.yacy.htroot.api;
+
import java.awt.Container;
import java.awt.Image;
import java.awt.MediaTracker;
@@ -89,30 +91,30 @@ public class snapshot {
defaultResponse.authenticationRequired();
return defaultResponse;
}
- int maxcount = post == null ? 10 : post.getInt("maxcount", 10);
- int depthx = post == null ? -1 : post.getInt("depth", -1);
- Integer depth = depthx == -1 ? null : depthx;
- String orderx = post == null ? "ANY" : post.get("order", "ANY");
- Snapshots.Order order = Snapshots.Order.valueOf(orderx);
- String statex = post == null ? Transactions.State.INVENTORY.name() : post.get("state", Transactions.State.INVENTORY.name());
- Transactions.State state = Transactions.State.valueOf(statex);
- String host = post == null ? null : post.get("host");
- Map<String, Revisions> iddate = Transactions.select(host, depth, order, maxcount, state);
+ final int maxcount = post == null ? 10 : post.getInt("maxcount", 10);
+ final int depthx = post == null ? -1 : post.getInt("depth", -1);
+ final Integer depth = depthx == -1 ? null : depthx;
+ final String orderx = post == null ? "ANY" : post.get("order", "ANY");
+ final Snapshots.Order order = Snapshots.Order.valueOf(orderx);
+ final String statex = post == null ? Transactions.State.INVENTORY.name() : post.get("state", Transactions.State.INVENTORY.name());
+ final Transactions.State state = Transactions.State.valueOf(statex);
+ final String host = post == null ? null : post.get("host");
+ final Map<String, Revisions> iddate = Transactions.select(host, depth, order, maxcount, state);
// now select the URL from the index for these ids in iddate and make an RSS feed
- RSSFeed rssfeed = new RSSFeed(Integer.MAX_VALUE);
+ final RSSFeed rssfeed = new RSSFeed(Integer.MAX_VALUE);
rssfeed.setChannel(new RSSMessage("Snapshot list for host = " + host + ", depth = " + depth + ", order = " + order + ", maxcount = " + maxcount, "", ""));
- for (Map.Entry<String, Revisions> e: iddate.entrySet()) {
+ for (final Map.Entry<String, Revisions> e: iddate.entrySet()) {
try {
- String u = e.getValue().url == null ? sb.index.fulltext().getURL(e.getKey()) : e.getValue().url;
+ final String u = e.getValue().url == null ? sb.index.fulltext().getURL(e.getKey()) : e.getValue().url;
if (u == null) continue;
- RSSMessage message = new RSSMessage(u, "", new DigestURL(u), e.getKey());
+ final RSSMessage message = new RSSMessage(u, "", new DigestURL(u), e.getKey());
message.setPubDate(e.getValue().dates[0]);
rssfeed.addMessage(message);
- } catch (IOException ee) {
+ } catch (final IOException ee) {
ConcurrentLog.logException(ee);
}
}
- byte[] rssBinary = UTF8.getBytes(rssfeed.toString());
+ final byte[] rssBinary = UTF8.getBytes(rssfeed.toString());
return new ByteArrayInputStream(rssBinary);
}
@@ -126,34 +128,34 @@ public class snapshot {
}
final boolean pngjpg = ext.equals("png") || ext.equals(DEFAULT_EXT);
String urlhash = post.get("urlhash", "");
- String url = post.get("url", "");
+ final String url = post.get("url", "");
DigestURL durl = null;
if (urlhash.length() == 0 && url.length() > 0) {
try {
durl = new DigestURL(url);
urlhash = ASCII.String(durl.hash());
- } catch (MalformedURLException e) {
+ } catch (final MalformedURLException e) {
}
}
if (durl == null && urlhash.length() > 0) {
try {
- String u = sb.index.fulltext().getURL(urlhash);
+ final String u = sb.index.fulltext().getURL(urlhash);
durl = u == null ? null : new DigestURL(u);
- } catch (IOException e) {
+ } catch (final IOException e) {
ConcurrentLog.logException(e);
}
}
if (ext.equals("json")) {
// command interface: view and change a transaction state, get metadata about transactions in the past
- String command = post.get("command", "metadata");
- String statename = post.get("state");
- JSONObject result = new JSONObject();
+ final String command = post.get("command", "metadata");
+ final String statename = post.get("state");
+ final JSONObject result = new JSONObject();
try {
if (command.equals("status")) {
// return a status of the transaction archive
- JSONObject sizes = new JSONObject();
- for (Map.Entry state: Transactions.sizes().entrySet()) sizes.put(state.getKey(), state.getValue());
+ final JSONObject sizes = new JSONObject();
+ for (final Map.Entry state: Transactions.sizes().entrySet()) sizes.put(state.getKey(), state.getValue());
result.put("size", sizes);
} else if (command.equals("list")) {
if (!authenticated) {
@@ -161,36 +163,36 @@ public class snapshot {
return defaultResponse;
}
// return a status of the transaction archive
- String host = post.get("host");
- String depth = post.get("depth");
- int depthi = depth == null ? -1 : Integer.parseInt(depth);
- for (Transactions.State state: statename == null ?
+ final String host = post.get("host");
+ final String depth = post.get("depth");
+ final int depthi = depth == null ? -1 : Integer.parseInt(depth);
+ for (final Transactions.State state: statename == null ?
new Transactions.State[]{Transactions.State.INVENTORY, Transactions.State.ARCHIVE} :
new Transactions.State[]{Transactions.State.valueOf(statename)}) {
if (host == null) {
- JSONObject hostCountInventory = new JSONObject();
- for (String h: Transactions.listHosts(state)) {
- int size = Transactions.listIDsSize(h, depthi, state);
+ final JSONObject hostCountInventory = new JSONObject();
+ for (final String h: Transactions.listHosts(state)) {
+ final int size = Transactions.listIDsSize(h, depthi, state);
if (size > 0) hostCountInventory.put(h, size);
}
result.put("count." + state.name(), hostCountInventory);
} else {
- TreeMap<Integer, Collection<Revisions>> ids = Transactions.listIDs(host, depthi, state);
+ final TreeMap<Integer, Collection<Revisions>> ids = Transactions.listIDs(host, depthi, state);
if (ids == null) {
result.put("result", "fail");
result.put("comment", "no entries for host " + host + " found");
} else {
- for (Map.Entry<Integer, Collection<Revisions>> entry: ids.entrySet()) {
- for (Revisions r: entry.getValue()) {
+ for (final Map.Entry<Integer, Collection<Revisions>> entry: ids.entrySet()) {
+ for (final Revisions r: entry.getValue()) {
try {
- JSONObject metadata = new JSONObject();
- String u = r.url != null ? r.url : sb.index.fulltext().getURL(r.urlhash);
+ final JSONObject metadata = new JSONObject();
+ final String u = r.url != null ? r.url : sb.index.fulltext().getURL(r.urlhash);
metadata.put("url", u == null ? "unknown" : u);
metadata.put("dates", r.dates);
assert r.depth == entry.getKey().intValue();
metadata.put("depth", entry.getKey().intValue());
result.put(r.urlhash, metadata);
- } catch (IOException e) {}
+ } catch (final IOException e) {}
}
}
}
@@ -201,7 +203,7 @@ public class snapshot {
defaultResponse.authenticationRequired();
return defaultResponse;
}
- Revisions r = Transactions.commit(urlhash);
+ final Revisions r = Transactions.commit(urlhash);
if (r != null) {
result.put("result", "success");
result.put("depth", r.depth);
@@ -216,7 +218,7 @@ public class snapshot {
defaultResponse.authenticationRequired();
return defaultResponse;
}
- Revisions r = Transactions.rollback(urlhash);
+ final Revisions r = Transactions.rollback(urlhash);
if (r != null) {
result.put("result", "success");
result.put("depth", r.depth);
@@ -239,8 +241,8 @@ public class snapshot {
r = Transactions.getRevisions(state, urlhash);
}
if (r != null) {
- JSONObject metadata = new JSONObject();
- String u = r.url != null ? r.url : sb.index.fulltext().getURL(r.urlhash);
+ final JSONObject metadata = new JSONObject();
+ final String u = r.url != null ? r.url : sb.index.fulltext().getURL(r.urlhash);
metadata.put("url", u == null ? "unknown" : u);
metadata.put("dates", r.dates);
metadata.put("depth", r.depth);
@@ -249,7 +251,7 @@ public class snapshot {
}
} catch (IOException |IllegalArgumentException e) {}
}
- } catch (JSONException e) {
+ } catch (final JSONException e) {
ConcurrentLog.logException(e);
}
String json = result.toString();
@@ -263,14 +265,14 @@ public class snapshot {
}
if (xml) {
- Collection<File> xmlSnapshots = Transactions.findPaths(durl, "xml", Transactions.State.ANY);
+ final Collection<File> xmlSnapshots = Transactions.findPaths(durl, "xml", Transactions.State.ANY);
File xmlFile = null;
if (xmlSnapshots.isEmpty()) {
throw new TemplateProcessingException("Could not find the xml snapshot file.", HttpStatus.SC_NOT_FOUND);
}
xmlFile = xmlSnapshots.iterator().next();
try {
- byte[] xmlBinary = FileUtils.read(xmlFile);
+ final byte[] xmlBinary = FileUtils.read(xmlFile);
return new ByteArrayInputStream(xmlBinary);
} catch (final IOException e) {
ConcurrentLog.logException(e);
@@ -288,12 +290,12 @@ public class snapshot {
"Could not find the pdf snapshot file. You must be authenticated to generate one on the fly.",
HttpStatus.SC_NOT_FOUND);
}
- SolrDocument sd = sb.index.fulltext().getMetadata(durl.hash());
+ final SolrDocument sd = sb.index.fulltext().getMetadata(durl.hash());
boolean success = false;
if (sd == null) {
success = Transactions.store(durl, new Date(), 99, false, true, sb.getConfigBool(SwitchboardConstants.PROXY_TRANSPARENT_PROXY, false) ? "http://127.0.0.1:" + sb.getConfigInt(SwitchboardConstants.SERVER_PORT, 8090) : null, sb.getConfig("crawler.http.acceptLanguage", null));
} else {
- SolrInputDocument sid = sb.index.fulltext().getDefaultConfiguration().toSolrInputDocument(sd);
+ final SolrInputDocument sid = sb.index.fulltext().getDefaultConfiguration().toSolrInputDocument(sd);
success = Transactions.store(sid, false, true, true, sb.getConfigBool(SwitchboardConstants.PROXY_TRANSPARENT_PROXY, false) ? "http://127.0.0.1:" + sb.getConfigInt(SwitchboardConstants.SERVER_PORT, 8090) : null, sb.getConfig("crawler.http.acceptLanguage", null));
}
if (success) {
@@ -312,7 +314,7 @@ public class snapshot {
}
if (pdf) {
try {
- byte[] pdfBinary = FileUtils.read(pdfFile);
+ final byte[] pdfBinary = FileUtils.read(pdfFile);
return new ByteArrayInputStream(pdfBinary);
} catch (final IOException e) {
ConcurrentLog.logException(e);
@@ -321,10 +323,10 @@ public class snapshot {
}
if (pngjpg) {
- int width = Math.min(post.getInt("width", DEFAULT_WIDTH), DEFAULT_WIDTH);
- int height = Math.min(post.getInt("height", DEFAULT_HEIGHT), DEFAULT_HEIGHT);
+ final int width = Math.min(post.getInt("width", DEFAULT_WIDTH), DEFAULT_WIDTH);
+ final int height = Math.min(post.getInt("height", DEFAULT_HEIGHT), DEFAULT_HEIGHT);
String imageFileStub = pdfFile.getAbsolutePath(); imageFileStub = imageFileStub.substring(0, imageFileStub.length() - 3); // cut off extension
- File imageFile = new File(imageFileStub + DEFAULT_WIDTH + "." + DEFAULT_HEIGHT + "." + ext);
+ final File imageFile = new File(imageFileStub + DEFAULT_WIDTH + "." + DEFAULT_HEIGHT + "." + ext);
if (!imageFile.exists() && authenticated) {
if(!Html2Image.pdf2image(pdfFile, imageFile, DEFAULT_WIDTH, DEFAULT_HEIGHT, DEFAULT_DENSITY, DEFAULT_QUALITY)) {
throw new TemplateProcessingException(
@@ -339,7 +341,7 @@ public class snapshot {
}
if (width == DEFAULT_WIDTH && height == DEFAULT_HEIGHT) {
try {
- byte[] imageBinary = FileUtils.read(imageFile);
+ final byte[] imageBinary = FileUtils.read(imageFile);
return new ByteArrayInputStream(imageBinary);
} catch (final IOException e) {
ConcurrentLog.logException(e);
@@ -362,7 +364,7 @@ public class snapshot {
* Ensure there is no alpha component on the ouput image, as it is pointless
* here and it is not well supported by the JPEGImageWriter from OpenJDK
*/
- BufferedImage scaledBufferedImg = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
+ final BufferedImage scaledBufferedImg = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
scaledBufferedImg.createGraphics().drawImage(scaled, 0, 0, width, height, null);
return new EncodedImage(scaledBufferedImg, ext, true);
} catch (final IOException e) {
diff --git a/htroot/api/status_p.java b/source/net/yacy/htroot/api/status_p.java
similarity index 80%
rename from htroot/api/status_p.java
rename to source/net/yacy/htroot/api/status_p.java
index 1b877aa54..3d6f881ef 100644
--- a/htroot/api/status_p.java
+++ b/source/net/yacy/htroot/api/status_p.java
@@ -25,6 +25,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.io.IOException;
import net.yacy.cora.protocol.RequestHeader;
@@ -57,8 +59,8 @@ public class status_p {
final serverObjects prop = new serverObjects();
final boolean html = post != null && post.containsKey("html");
prop.setLocalized(html);
- Segment segment = sb.index;
- Fulltext fulltext = segment.fulltext();
+ final Segment segment = sb.index;
+ final Fulltext fulltext = segment.fulltext();
prop.put("rejected", "0");
sb.updateMySeed();
@@ -98,7 +100,7 @@ public class status_p {
prop.putNum("loaderMax", sb.getConfigLong(SwitchboardConstants.CRAWLER_THREADS_ACTIVE_MAX, 10));
//local crawl queue
- BusyThread localCrawl = sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
+ final BusyThread localCrawl = sb.getThread(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
prop.putNum("localCrawlSize", localCrawl == null ? 0 : localCrawl.getJobCount());
prop.put("localCrawlState", sb.crawlJobIsPaused(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL) ? STATE_PAUSED : STATE_RUNNING);
@@ -107,7 +109,7 @@ public class status_p {
prop.put("limitCrawlState", STATE_RUNNING);
//remote crawl queue
- BusyThread remoteCrawl = sb.getThread(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
+ final BusyThread remoteCrawl = sb.getThread(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL);
prop.putNum("remoteCrawlSize", remoteCrawl == null ? 0 : remoteCrawl.getJobCount());
prop.put("remoteCrawlState", sb.crawlJobIsPaused(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL) ? STATE_PAUSED : STATE_RUNNING);
@@ -125,7 +127,7 @@ public class status_p {
profile = sb.crawler.getActive(h);
if (CrawlSwitchboard.DEFAULT_PROFILES.contains(profile.name())) continue;
profile.putProfileEntry("crawlProfiles_list_", prop, true, false, count, domlistlength);
- RowHandleSet urlhashes = sb.crawler.getURLHashes(h);
+ final RowHandleSet urlhashes = sb.crawler.getURLHashes(h);
prop.put("crawlProfiles_list_" + count + "_count", urlhashes == null ? "unknown" : Integer.toString(urlhashes.size()));
if (profile.urlMustMatchPattern() == CrawlProfile.MATCH_ALL_PATTERN) {
hosts = hosts + "," + profile.name();
@@ -138,20 +140,20 @@ public class status_p {
prop.put("postprocessingRunning", CollectionConfiguration.postprocessingRunning ? 1 : 0);
- boolean processCollection = sb.index.fulltext().getDefaultConfiguration().contains(CollectionSchema.process_sxt) && (sb.index.connectedCitation() || sb.index.fulltext().useWebgraph());
- boolean processWebgraph = sb.index.fulltext().getWebgraphConfiguration().contains(WebgraphSchema.process_sxt) && sb.index.fulltext().useWebgraph();
+ final boolean processCollection = sb.index.fulltext().getDefaultConfiguration().contains(CollectionSchema.process_sxt) && (sb.index.connectedCitation() || sb.index.fulltext().useWebgraph());
+ final boolean processWebgraph = sb.index.fulltext().getWebgraphConfiguration().contains(WebgraphSchema.process_sxt) && sb.index.fulltext().useWebgraph();
- long timeSinceStart = (processCollection || processWebgraph) && CollectionConfiguration.postprocessingRunning ? System.currentTimeMillis() - CollectionConfiguration.postprocessingStartTime : 0;
+ final long timeSinceStart = (processCollection || processWebgraph) && CollectionConfiguration.postprocessingRunning ? System.currentTimeMillis() - CollectionConfiguration.postprocessingStartTime : 0;
//postprocessingCollection1Count = 0;
//postprocessingsWebgraphCount = 0;
long collectionRemainingCount = 0, webgraphRemainingCount = 0;
- if (processCollection) try {collectionRemainingCount = sb.index.fulltext().getDefaultConnector().getCountByQuery("{!cache=false}" + CollectionConfiguration.collection1query(sb.index, null));} catch (IOException e) {}
- if (processWebgraph) try {webgraphRemainingCount = sb.index.fulltext().getWebgraphConnector().getCountByQuery(CollectionConfiguration.webgraphquery(sb.index, null));} catch (IOException e) {}
- long countSinceStart = CollectionConfiguration.postprocessingRunning ? CollectionConfiguration.postprocessingCollection1Count + CollectionConfiguration.postprocessingWebgraphCount - collectionRemainingCount - webgraphRemainingCount : 0;
- int speed = timeSinceStart == 0 ? 0 : (int) (60000 * countSinceStart / timeSinceStart); // pages per minute
- long remainingTime = speed == 0 ? 0 : 60000 * collectionRemainingCount / speed; // millis
- int remainingTimeMinutes = (int) (remainingTime / 60000);
- int remainingTimeSeconds = (int) ((remainingTime - (remainingTimeMinutes * 60000)) / 1000);
+ if (processCollection) try {collectionRemainingCount = sb.index.fulltext().getDefaultConnector().getCountByQuery("{!cache=false}" + CollectionConfiguration.collection1query(sb.index, null));} catch (final IOException e) {}
+ if (processWebgraph) try {webgraphRemainingCount = sb.index.fulltext().getWebgraphConnector().getCountByQuery(CollectionConfiguration.webgraphquery(sb.index, null));} catch (final IOException e) {}
+ final long countSinceStart = CollectionConfiguration.postprocessingRunning ? CollectionConfiguration.postprocessingCollection1Count + CollectionConfiguration.postprocessingWebgraphCount - collectionRemainingCount - webgraphRemainingCount : 0;
+ final int speed = timeSinceStart == 0 ? 0 : (int) (60000 * countSinceStart / timeSinceStart); // pages per minute
+ final long remainingTime = speed == 0 ? 0 : 60000 * collectionRemainingCount / speed; // millis
+ final int remainingTimeMinutes = (int) (remainingTime / 60000);
+ final int remainingTimeSeconds = (int) ((remainingTime - (remainingTimeMinutes * 60000)) / 1000);
prop.put("postprocessingCollectionRemainingCount", collectionRemainingCount);
prop.put("postprocessingWebgraphRemainingCount", webgraphRemainingCount);
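Note: the postprocessing figures computed in the status_p hunk above are plain rate arithmetic: documents handled since the postprocessing start, divided by the elapsed time, give a pages-per-minute speed, and the remaining collection count divided by that speed gives the displayed ETA. A minimal standalone sketch of the same formulas, using hypothetical counts that are not taken from YaCy:

public class PostprocessingEtaSketch {
    public static void main(final String[] args) {
        // hypothetical figures, for illustration only
        final long timeSinceStart = 240_000L;         // 4 minutes of postprocessing so far
        final long countSinceStart = 1_200L;          // documents finished in that time
        final long collectionRemainingCount = 9_000L; // documents still queued

        // same formulas as in status_p: speed in pages per minute, remaining time in milliseconds
        final int speed = timeSinceStart == 0 ? 0 : (int) (60_000 * countSinceStart / timeSinceStart);
        final long remainingTime = speed == 0 ? 0 : 60_000 * collectionRemainingCount / speed;
        final int remainingTimeMinutes = (int) (remainingTime / 60_000);
        final int remainingTimeSeconds = (int) ((remainingTime - remainingTimeMinutes * 60_000L) / 1_000);

        // prints: 300 pages/min, ETA 30m 0s
        System.out.println(speed + " pages/min, ETA " + remainingTimeMinutes + "m " + remainingTimeSeconds + "s");
    }
}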
diff --git a/htroot/api/table_p.java b/source/net/yacy/htroot/api/table_p.java
similarity index 97%
rename from htroot/api/table_p.java
rename to source/net/yacy/htroot/api/table_p.java
index 65100f25a..bca9e37ec 100644
--- a/htroot/api/table_p.java
+++ b/source/net/yacy/htroot/api/table_p.java
@@ -17,6 +17,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
diff --git a/htroot/api/termlist_p.java b/source/net/yacy/htroot/api/termlist_p.java
similarity index 98%
rename from htroot/api/termlist_p.java
rename to source/net/yacy/htroot/api/termlist_p.java
index 7ce03ae2c..42de229ed 100644
--- a/htroot/api/termlist_p.java
+++ b/source/net/yacy/htroot/api/termlist_p.java
@@ -21,6 +21,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
@@ -39,12 +41,12 @@ import net.yacy.server.serverSwitch;
public class termlist_p {
private final static ConcurrentLog log = new ConcurrentLog("TERMLIST");
-
+
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
final Switchboard sb = (Switchboard) env;
- Segment segment = sb.index;
+ final Segment segment = sb.index;
final boolean delete = post != null && post.containsKey("delete");
final long mincount = post == null ? 10000 : post.getLong("mincount", 10000);
Rating e;
@@ -54,7 +56,7 @@ public class termlist_p {
String hstring;
final ArrayList deleteterms = new ArrayList();
long over1000 = 0, over10000 = 0, over100000 = 0, over1000000 = 0, over10000000 = 0, over100000000 = 0;
-
+
final IndexCell<WordReference> termIndex = segment.termIndex();
int rowsize = 0;
if(termIndex != null) {
diff --git a/htroot/api/timeline_p.java b/source/net/yacy/htroot/api/timeline_p.java
similarity index 75%
rename from htroot/api/timeline_p.java
rename to source/net/yacy/htroot/api/timeline_p.java
index c531a510b..91baf7415 100644
--- a/htroot/api/timeline_p.java
+++ b/source/net/yacy/htroot/api/timeline_p.java
@@ -24,6 +24,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Date;
@@ -38,8 +40,8 @@ import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.sorting.OrderedScoreMap;
import net.yacy.cora.util.CommonPattern;
-import net.yacy.search.EventTracker.Event;
import net.yacy.search.EventTracker;
+import net.yacy.search.EventTracker.Event;
import net.yacy.search.query.AccessTracker;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@@ -48,18 +50,18 @@ public final class timeline_p {
// example:
// http://localhost:8090/api/timeline_p.xml?from=20140601000000&to=20140629000000&data=queries&head=2&period=6h
-
+
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
if ((post == null) || (env == null)) return prop;
// get type of data to be listed in the timeline
- int maxeventsperperiod = post.getInt("head", 1); // the maximum number of events per period
- String period = post.get("period", ""); // must be an integer with a character c at the end, c = Y|M|d|h|m|s
+ final int maxeventsperperiod = post.getInt("head", 1); // the maximum number of events per period
+ final String period = post.get("period", ""); // must be an integer with a character c at the end, c = Y|M|d|h|m|s
long periodlength = 0;
if (period.length() > 0) {
- char c = period.charAt(period.length() - 1);
- long p = Long.parseLong(period.substring(0, period.length() - 1));
+ final char c = period.charAt(period.length() - 1);
+ final long p = Long.parseLong(period.substring(0, period.length() - 1));
if (c == 's') periodlength = p * AbstractFormatter.secondMillis;
else if (c == 'm') periodlength = p * AbstractFormatter.minuteMillis;
else if (c == 'h') periodlength = p * AbstractFormatter.hourMillis;
@@ -69,42 +71,42 @@ public final class timeline_p {
else periodlength = 0;
}
final String[] data = CommonPattern.COMMA.split(post.get("data", "")); // a string of word hashes that shall be searched and combined
- Map<String, List<EventTracker.Event>> proc = new HashMap<>();
- for (String s: data) if (s.length() > 0) proc.put(s, null);
-
+ final Map<String, List<EventTracker.Event>> proc = new HashMap<>();
+ for (final String s: data) if (s.length() > 0) proc.put(s, null);
+
// get a time period
Date fromDate = new Date(0);
Date toDate = new Date();
- try {fromDate = GenericFormatter.SHORT_SECOND_FORMATTER.parse(post.get("from", GenericFormatter.SHORT_SECOND_FORMATTER.format(fromDate)), 0).getTime();} catch (ParseException e) {}
- try {toDate = GenericFormatter.SHORT_SECOND_FORMATTER.parse(post.get("to", GenericFormatter.SHORT_SECOND_FORMATTER.format(toDate)), 0).getTime();} catch (ParseException e) {}
-
+ try {fromDate = GenericFormatter.SHORT_SECOND_FORMATTER.parse(post.get("from", GenericFormatter.SHORT_SECOND_FORMATTER.format(fromDate)), 0).getTime();} catch (final ParseException e) {}
+ try {toDate = GenericFormatter.SHORT_SECOND_FORMATTER.parse(post.get("to", GenericFormatter.SHORT_SECOND_FORMATTER.format(toDate)), 0).getTime();} catch (final ParseException e) {}
+
// get latest dump;
AccessTracker.dumpLog();
-
+
// fill proc with events from the given data and time period
if (proc.containsKey("queries")) {
- List<EventTracker.Event> events = AccessTracker.readLog(AccessTracker.getDumpFile(), fromDate, toDate);
+ final List<EventTracker.Event> events = AccessTracker.readLog(AccessTracker.getDumpFile(), fromDate, toDate);
proc.put("queries", events);
}
-
+
// mix all events into one event list
- TreeMap<String, EventTracker.Event> eax = new TreeMap<>();
- for (List<EventTracker.Event> events: proc.values()) if (events != null) {
- for (EventTracker.Event event: events) eax.put(event.getFormattedDate(), event);
+ final TreeMap<String, EventTracker.Event> eax = new TreeMap<>();
+ for (final List<EventTracker.Event> events: proc.values()) if (events != null) {
+ for (final EventTracker.Event event: events) eax.put(event.getFormattedDate(), event);
}
proc.clear(); // we don't need that here any more
List<EventTracker.Event> ea = new ArrayList<>();
- for (Event event: eax.values()) ea.add(event);
-
+ for (final Event event: eax.values()) ea.add(event);
+
if (periodlength > 0 && ea.size() > 0) {
// create a statistical analysis; step by chunks of periodlength entries
Event firstEvent = ea.iterator().next();
long startDate = fromDate.getTime();
//TreeMap
- OrderedScoreMap<String> accumulation = new OrderedScoreMap<>(null);
- List<EventTracker.Event> eap = new ArrayList<>();
+ final OrderedScoreMap<String> accumulation = new OrderedScoreMap<>(null);
+ final List<EventTracker.Event> eap = new ArrayList<>();
String limit = GenericFormatter.SHORT_SECOND_FORMATTER.format(new Date(startDate + periodlength));
- for (Event event: ea) {
+ for (final Event event: ea) {
if (event.getFormattedDate().compareTo(limit) >= 0) {
// write accumulation of the score map into eap
stats(accumulation, eap, startDate, periodlength, maxeventsperperiod, firstEvent.type);
@@ -115,14 +117,14 @@ public final class timeline_p {
accumulation.inc(event.payload.toString());
}
stats(accumulation, eap, startDate, periodlength, maxeventsperperiod, firstEvent.type);
-
+
// overwrite the old table for out
ea = eap;
}
-
+
// create a list of these events
int count = 0;
- for (Event event: ea) {
+ for (final Event event: ea) {
prop.put("event_" + count + "_time", event.getFormattedDate());
prop.put("event_" + count + "_isPeriod", event.duration == 0 ? 0 : 1);
prop.put("event_" + count + "_isPeriod_duration", event.duration);
@@ -136,15 +138,15 @@ public final class timeline_p {
return prop;
}
- private static void stats(OrderedScoreMap<String> accumulation, List<EventTracker.Event> eap, long startDate, long periodlength, int head, String type) {
+ private static void stats(final OrderedScoreMap<String> accumulation, final List<EventTracker.Event> eap, final long startDate, final long periodlength, final int head, final String type) {
// write accumulation of the score map into eap
- Iterator<String> si = accumulation.keys(false);
+ final Iterator<String> si = accumulation.keys(false);
int c = 0;
while (si.hasNext() && c++ < head) {
- String key = si.next();
+ final String key = si.next();
eap.add(new Event(startDate, periodlength, type, key, accumulation.get(key)));
}
accumulation.clear();
}
-
+
}
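Note: timeline_p parses the period parameter (for example ?period=6h in the URL shown above) as an integer followed by a unit character out of Y|M|d|h|m|s and turns it into a bucket length in milliseconds; events are then accumulated per bucket and only the head highest-scoring payloads of each bucket are kept. A self-contained sketch of just that parsing step, with local millisecond constants instead of the AbstractFormatter ones (month and year approximated here as 30 and 365 days, which is an assumption):

import java.util.concurrent.TimeUnit;

public final class PeriodParseSketch {

    /** Parses strings like "30s", "15m", "6h" or "7d" into milliseconds; returns 0 when unparseable. */
    static long periodMillis(final String period) {
        if (period == null || period.length() < 2) return 0;
        final char c = period.charAt(period.length() - 1);
        final long p;
        try {
            p = Long.parseLong(period.substring(0, period.length() - 1));
        } catch (final NumberFormatException e) {
            return 0;
        }
        switch (c) {
            case 's': return p * TimeUnit.SECONDS.toMillis(1);
            case 'm': return p * TimeUnit.MINUTES.toMillis(1);
            case 'h': return p * TimeUnit.HOURS.toMillis(1);
            case 'd': return p * TimeUnit.DAYS.toMillis(1);
            case 'M': return p * TimeUnit.DAYS.toMillis(30);   // approximation, see note above
            case 'Y': return p * TimeUnit.DAYS.toMillis(365);  // approximation, see note above
            default:  return 0;
        }
    }

    public static void main(final String[] args) {
        System.out.println(periodMillis("6h")); // 21600000 ms, i.e. six-hour buckets
    }
}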
diff --git a/htroot/api/version.java b/source/net/yacy/htroot/api/version.java
similarity index 96%
rename from htroot/api/version.java
rename to source/net/yacy/htroot/api/version.java
index c19eff656..dd7833db5 100644
--- a/htroot/api/version.java
+++ b/source/net/yacy/htroot/api/version.java
@@ -1,5 +1,7 @@
+package net.yacy.htroot.api;
+
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.peers.operation.yacyBuildProperties;
import net.yacy.server.serverObjects;
diff --git a/htroot/api/webstructure.java b/source/net/yacy/htroot/api/webstructure.java
similarity index 90%
rename from htroot/api/webstructure.java
rename to source/net/yacy/htroot/api/webstructure.java
index e1cbf59d9..a437277a5 100644
--- a/htroot/api/webstructure.java
+++ b/source/net/yacy/htroot/api/webstructure.java
@@ -21,6 +21,7 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
import java.io.IOException;
import java.net.MalformedURLException;
@@ -113,25 +114,25 @@ public class webstructure {
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
final Switchboard sb = (Switchboard) env;
- String about = post == null ? null : post.get("about", null); // may be a URL, a URL hash or a domain hash
+ final String about = post == null ? null : post.get("about", null); // may be a URL, a URL hash or a domain hash
prop.put("out", 0);
prop.put("in", 0);
prop.put("references", 0);
prop.put("citations", 0);
- boolean authenticated = sb.adminAuthenticated(header) >= 2;
+ final boolean authenticated = sb.adminAuthenticated(header) >= 2;
if (about != null) {
DigestURL url = null;
byte[] urlhash = null;
- Set<String> hostHashes = new HashSet<>();
+ final Set<String> hostHashes = new HashSet<>();
if (about.length() == 6 && Base64Order.enhancedCoder.wellformed(ASCII.getBytes(about))) {
hostHashes.add(about);
} else if (about.length() == 12 && Base64Order.enhancedCoder.wellformed(ASCII.getBytes(about))) {
urlhash = ASCII.getBytes(about);
hostHashes.add(about.substring(6));
if (authenticated) try {
- String u = sb.getURL(urlhash);
+ final String u = sb.getURL(urlhash);
url = u == null ? null : new DigestURL(u);
- } catch (IOException e) {
+ } catch (final IOException e) {
url = null;
ConcurrentLog.logException(e);
}
@@ -187,7 +188,7 @@ public class webstructure {
prop.put("references", 1);
net.yacy.document.Document scraper = null;
if (url != null) try {
- ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
+ final ClientIdentification.Agent agent = ClientIdentification.getAgent(post.get("agentName", ClientIdentification.yacyInternetCrawlerAgentName));
scraper = sb.loader.loadDocument(url, CacheStrategy.IFEXIST, null, agent);
} catch (final IOException e) {
ConcurrentLog.logException(e);
@@ -203,8 +204,8 @@ public class webstructure {
int d = 0;
Iterator i = scraper.inboundLinks().keySet().iterator();
while (i.hasNext()) {
- DigestURL refurl = i.next();
- byte[] refhash = refurl.hash();
+ final DigestURL refurl = i.next();
+ final byte[] refhash = refurl.hash();
prop.putXML("references_documents_0_anchors_" + d + "_url", refurl.toNormalform(true));
prop.put("references_documents_0_anchors_" + d + "_hash", refhash);
prop.put("references_documents_0_anchors_" + d + "_outbound", 0);
@@ -212,8 +213,8 @@ public class webstructure {
}
i = scraper.outboundLinks().keySet().iterator();
while (i.hasNext()) {
- DigestURL refurl = i.next();
- byte[] refhash = refurl.hash();
+ final DigestURL refurl = i.next();
+ final byte[] refhash = refurl.hash();
prop.putXML("references_documents_0_anchors_" + d + "_url", refurl.toNormalform(true));
prop.put("references_documents_0_anchors_" + d + "_hash", refhash);
prop.put("references_documents_0_anchors_" + d + "_outbound", 1);
@@ -228,9 +229,9 @@ public class webstructure {
// citations
prop.put("citations", 1);
- ReferenceReportCache rrc = sb.index.getReferenceReportCache();
+ final ReferenceReportCache rrc = sb.index.getReferenceReportCache();
ReferenceReport rr = null;
- try {rr = rrc.getReferenceReport(ASCII.String(urlhash), true);} catch (IOException e) {}
+ try {rr = rrc.getReferenceReport(ASCII.String(urlhash), true);} catch (final IOException e) {}
if (rr != null && rr.getInternalCount() > 0 && rr.getExternalCount() > 0) {
prop.put("citations_count", 1);
prop.put("citations_documents", 1);
@@ -240,11 +241,11 @@ public class webstructure {
prop.put("citations_documents_0_urle", url == null ? 0 : 1);
if (url != null) prop.putXML("citations_documents_0_urle_url", url.toNormalform(true));
int d = 0;
- HandleSet ids = rr.getInternallIDs();
- try {ids.putAll(rr.getExternalIDs());} catch (SpaceExceededException e) {}
- Iterator<byte[]> i = ids.iterator();
+ final HandleSet ids = rr.getInternallIDs();
+ try {ids.putAll(rr.getExternalIDs());} catch (final SpaceExceededException e) {}
+ final Iterator<byte[]> i = ids.iterator();
while (i.hasNext()) {
- byte[] refhash = i.next();
+ final byte[] refhash = i.next();
String refurl;
try {
refurl = authenticated ? sb.getURL(refhash) : null;
@@ -253,7 +254,7 @@ public class webstructure {
prop.put("citations_documents_0_anchors_" + d + "_urle_hash", refhash);
prop.put("citations_documents_0_anchors_" + d + "_urle_date", GenericFormatter.SHORT_DAY_FORMATTER.format(new Date())); // superfluous?
d++;
- } catch (IOException e) {
+ } catch (final IOException e) {
ConcurrentLog.logException(e);
}
}
@@ -290,11 +291,11 @@ public class webstructure {
return prop;
}
- public static void reference(serverObjects prop, String prefix, int c, WebStructureGraph.StructureEntry sentry, WebStructureGraph ws) {
+ public static void reference(final serverObjects prop, final String prefix, final int c, final WebStructureGraph.StructureEntry sentry, final WebStructureGraph ws) {
prop.put(prefix + "_domains_" + c + "_hash", sentry.hosthash);
prop.putXML(prefix + "_domains_" + c + "_domain", sentry.hostname);
prop.put(prefix + "_domains_" + c + "_date", sentry.date);
- Iterator<Map.Entry<String, Integer>> k = sentry.references.entrySet().iterator();
+ final Iterator<Map.Entry<String, Integer>> k = sentry.references.entrySet().iterator();
Map.Entry<String, Integer> refentry;
String refdom, refhash;
Integer refcount;
diff --git a/htroot/api/yacydoc.java b/source/net/yacy/htroot/api/yacydoc.java
similarity index 99%
rename from htroot/api/yacydoc.java
rename to source/net/yacy/htroot/api/yacydoc.java
index 5e12e67d4..ff3092773 100644
--- a/htroot/api/yacydoc.java
+++ b/source/net/yacy/htroot/api/yacydoc.java
@@ -25,6 +25,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.api;
+
import java.net.MalformedURLException;
import java.util.Arrays;
diff --git a/htroot/api/ynetSearch.java b/source/net/yacy/htroot/api/ynetSearch.java
similarity index 56%
rename from htroot/api/ynetSearch.java
rename to source/net/yacy/htroot/api/ynetSearch.java
index f03098c6d..84bd0f507 100644
--- a/htroot/api/ynetSearch.java
+++ b/source/net/yacy/htroot/api/ynetSearch.java
@@ -1,4 +1,5 @@
+package net.yacy.htroot.api;
import java.io.IOException;
import java.io.InputStream;
@@ -15,19 +16,19 @@ import net.yacy.server.serverSwitch;
public class ynetSearch {
- public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
+ public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Switchboard sb = (Switchboard) env;
final boolean isAdmin=sb.verifyAuthentication(header);
final serverObjects prop = new serverObjects();
- if(post != null){
- if(!isAdmin){
- // force authentication if desired
- if(post.containsKey("login")){
- prop.authenticationRequired();
- }
- return prop;
- }
+ if(post != null){
+ if(!isAdmin){
+ // force authentication if desired
+ if(post.containsKey("login")){
+ prop.authenticationRequired();
+ }
+ return prop;
+ }
InputStream is = null;
try {
String searchaddress = post.get("url");
@@ -41,21 +42,21 @@ public class ynetSearch {
String s = searchaddress;
Map.Entry k;
while(it.hasNext()) {
- k = it.next();
- s = s + "&" + k.getKey() + "=" + k.getValue();
+ k = it.next();
+ s = s + "&" + k.getKey() + "=" + k.getValue();
}
- // final String s = searchaddress+"&query="+post.get("search")+"&maximumRecords="+post.get("maximumRecords")+"&startRecord="+post.get("startRecord");
- final URL url = new URL(s);
- is = url.openStream();
- final String httpout = new Scanner(is).useDelimiter( "\\Z" ).next();
- prop.put("http", httpout);
+ // final String s = searchaddress+"&query="+post.get("search")+"&maximumRecords="+post.get("maximumRecords")+"&startRecord="+post.get("startRecord");
+ final URL url = new URL(s);
+ is = url.openStream();
+ final String httpout = new Scanner(is).useDelimiter( "\\Z" ).next();
+ prop.put("http", httpout);
} catch (final Exception e ) {
- prop.put("url", "error!");
+ prop.put("url", "error!");
} finally {
- if ( is != null )
- try { is.close(); } catch (final IOException e ) { }
+ if ( is != null )
+ try { is.close(); } catch (final IOException e ) { }
}
- }
- return prop;
- }
+ }
+ return prop;
+ }
}
\ No newline at end of file
diff --git a/htroot/env/style.java b/source/net/yacy/htroot/env/style.java
similarity index 94%
rename from htroot/env/style.java
rename to source/net/yacy/htroot/env/style.java
index 2501c7fdf..3525c62a9 100644
--- a/htroot/env/style.java
+++ b/source/net/yacy/htroot/env/style.java
@@ -17,6 +17,8 @@
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+package net.yacy.htroot.env;
+
import java.util.Iterator;
import net.yacy.cora.protocol.RequestHeader;
@@ -27,7 +29,7 @@ public class style {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, @SuppressWarnings("unused") final serverObjects post, final serverSwitch env) {
final serverObjects prop = new serverObjects();
- Iterator i = env.configKeys();
+ final Iterator i = env.configKeys();
String key;
while (i.hasNext()) {
key = i.next();
diff --git a/htroot/p2p/seeds.java b/source/net/yacy/htroot/p2p/seeds.java
similarity index 73%
rename from htroot/p2p/seeds.java
rename to source/net/yacy/htroot/p2p/seeds.java
index d734d9300..0fd0ff27d 100644
--- a/htroot/p2p/seeds.java
+++ b/source/net/yacy/htroot/p2p/seeds.java
@@ -1,4 +1,4 @@
-
+package net.yacy.htroot.p2p;
import java.util.ArrayList;
import java.util.Map;
@@ -24,38 +24,38 @@ import net.yacy.server.serverSwitch;
public final class seeds {
private static final int LISTMAX = 1000;
-
+
public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
// return variable that accumulates replacements
final Switchboard sb = (Switchboard) env;
- int maxcount = Math.min(LISTMAX, post == null ? Integer.MAX_VALUE : post.getInt("maxcount", Integer.MAX_VALUE));
- float minversion = Math.min(LISTMAX, post == null ? 0.0f : post.getFloat("minversion", 0.0f));
- boolean nodeonly = post == null || !post.containsKey("node") ? false : post.getBoolean("node");
- boolean includeme = post == null || !post.containsKey("me") ? true : post.getBoolean("me");
- boolean addressonly = post == null || !post.containsKey("address") ? false : post.getBoolean("address");
- String peername = post == null ? null : post.containsKey("my") ? sb.peers.myName() : post.get("peername");
+ final int maxcount = Math.min(LISTMAX, post == null ? Integer.MAX_VALUE : post.getInt("maxcount", Integer.MAX_VALUE));
+ final float minversion = Math.min(LISTMAX, post == null ? 0.0f : post.getFloat("minversion", 0.0f));
+ final boolean nodeonly = post == null || !post.containsKey("node") ? false : post.getBoolean("node");
+ final boolean includeme = post == null || !post.containsKey("me") ? true : post.getBoolean("me");
+ final boolean addressonly = post == null || !post.containsKey("address") ? false : post.getBoolean("address");
+ final String peername = post == null ? null : post.containsKey("my") ? sb.peers.myName() : post.get("peername");
final ArrayList<Seed> v;
if (post != null && post.containsKey("my")) {
v = new ArrayList(1);
v.add(sb.peers.mySeed());
} else if (post != null && post.containsKey("id")) {
v = new ArrayList(1);
- Seed s = sb.peers.get(post.get("id"));
+ final Seed s = sb.peers.get(post.get("id"));
if (s != null) v.add(s);
} else if (post != null && post.containsKey("name")) {
v = new ArrayList(1);
- Seed s = sb.peers.lookupByName(post.get("name"));
+ final Seed s = sb.peers.lookupByName(post.get("name"));
if (s != null) v.add(s);
} else {
v= sb.peers.getSeedlist(maxcount, includeme, nodeonly, minversion);
}
final serverObjects prop = new serverObjects();
-
+
// write simple-encoded seed lines or json
- String EXT = header.get(HeaderFramework.CONNECTION_PROP_EXT);
- boolean json = EXT != null && EXT.equals("json");
- boolean xml = EXT != null && EXT.equals("xml");
-
+ final String EXT = header.get(HeaderFramework.CONNECTION_PROP_EXT);
+ final boolean json = EXT != null && EXT.equals("json");
+ final boolean xml = EXT != null && EXT.equals("xml");
+
if (json) {
// check for JSONP
if ( post != null && post.containsKey("callback") ) {
@@ -68,17 +68,17 @@ public final class seeds {
// construct json property lists
int count = 0;
for (int i = 0; i < v.size(); i++) {
- Seed seed = v.get(i);
+ final Seed seed = v.get(i);
if (peername != null && !peername.equals(seed.getName())) continue;
- Set<String> ips = seed.getIPs();
+ final Set<String> ips = seed.getIPs();
if (ips == null || ips.size() == 0) continue;
prop.putJSON("peers_" + count + "_map_0_k", Seed.HASH);
prop.put("peers_" + count + "_map_0_v", JSONObject.quote(seed.hash));
prop.put("peers_" + count + "_map_0_c", 1);
- Map<String, String> map = seed.getMap();
+ final Map<String, String> map = seed.getMap();
int c = 1;
if (!addressonly) {
- for (Map.Entry<String, String> m: map.entrySet()) {
+ for (final Map.Entry<String, String> m: map.entrySet()) {
prop.putJSON("peers_" + count + "_map_" + c + "_k", m.getKey());
prop.put("peers_" + count + "_map_" + c + "_v", JSONObject.quote(m.getValue()));
prop.put("peers_" + count + "_map_" + c + "_c", 1);
@@ -86,9 +86,9 @@ public final class seeds {
}
}
// construct a list of ips
- StringBuilder a = new StringBuilder();
+ final StringBuilder a = new StringBuilder();
a.append('[');
- for (String ip: ips) a.append(JSONObject.quote(seed.getPublicAddress(ip))).append(',');
+ for (final String ip: ips) a.append(JSONObject.quote(seed.getPublicAddress(ip))).append(',');
a.setCharAt(a.length()-1, ']');
prop.putJSON("peers_" + count + "_map_" + c + "_k", "Address");
prop.put("peers_" + count + "_map_" + c + "_v", a.toString());
@@ -103,22 +103,22 @@ public final class seeds {
} else if (xml) {
int count = 0;
for (int i = 0; i < v.size(); i++) {
- Seed seed = v.get(i);
+ final Seed seed = v.get(i);
if (peername != null && !peername.equals(seed.getName())) continue;
- Set<String> ips = seed.getIPs();
+ final Set<String> ips = seed.getIPs();
if (ips == null || ips.size() == 0) continue;
prop.putXML("peers_" + count + "_map_0_k", Seed.HASH);
prop.putXML("peers_" + count + "_map_0_v", seed.hash);
- Map<String, String> map = seed.getMap();
+ final Map<String, String> map = seed.getMap();
int c = 1;
if (!addressonly) {
- for (Map.Entry<String, String> m: map.entrySet()) {
+ for (final Map.Entry<String, String> m: map.entrySet()) {
prop.putXML("peers_" + count + "_map_" + c + "_k", m.getKey());
prop.putXML("peers_" + count + "_map_" + c + "_v", m.getValue());
c++;
}
}
- for (String ip: ips) {
+ for (final String ip: ips) {
prop.putXML("peers_" + count + "_map_" + c + "_k", "Address");
prop.putXML("peers_" + count + "_map_" + c + "_v", seed.getPublicAddress(ip));
c++;
@@ -126,17 +126,17 @@ public final class seeds {
prop.put("peers_" + count + "_map", c);
count++;
}
-
+
prop.put("peers_" + (count - 1) + "_c", 0);
prop.put("peers", count);
} else {
final StringBuilder encoded = new StringBuilder(1024);
- for (Seed seed: v) {
+ for (final Seed seed: v) {
encoded.append(seed.genSeedStr(null)).append(serverCore.CRLF_STRING);
- }
+ }
prop.put("encoded", encoded.toString());
}
-
+
// return rewrite properties
return prop;
}
diff --git a/htroot/proxymsg/urlproxyheader.java b/source/net/yacy/htroot/proxymsg/urlproxyheader.java
similarity index 92%
rename from htroot/proxymsg/urlproxyheader.java
rename to source/net/yacy/htroot/proxymsg/urlproxyheader.java
index 4f8691f63..6cf2e891a 100644
--- a/htroot/proxymsg/urlproxyheader.java
+++ b/source/net/yacy/htroot/proxymsg/urlproxyheader.java
@@ -1,3 +1,5 @@
+package net.yacy.htroot.proxymsg;
+
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.data.BookmarksDB.Bookmark;
import net.yacy.search.Switchboard;
@@ -15,19 +17,19 @@ public class urlproxyheader {
final Switchboard sb = (Switchboard) env;
String proxyurlstr = post.get("url",""); // the url of remote page currently viewed
- boolean hasRights = sb.verifyAuthentication(requestHeader);
+ final boolean hasRights = sb.verifyAuthentication(requestHeader);
prop.put("allowbookmark", hasRights);
if (post.containsKey("addbookmark")) {
proxyurlstr = post.get("bookmark");
- Bookmark bmk = sb.bookmarksDB.createorgetBookmark(proxyurlstr, null);
+ final Bookmark bmk = sb.bookmarksDB.createorgetBookmark(proxyurlstr, null);
if (bmk != null) {
bmk.setPublic(false);
bmk.addTag("/proxy"); // add to bookmark folder
sb.bookmarksDB.saveBookmark(bmk);
}
}
-
+
prop.put("proxyurl", proxyurlstr);
prop.put("allowbookmark_proxyurl", proxyurlstr);
@@ -36,7 +38,7 @@ public class urlproxyheader {
} else {
prop.put("httpsAlertMsg", "0");
}
-
+
// TODO: get some index data to display
/*
if (post.containsKey("hash")) {
diff --git a/source/net/yacy/http/servlets/YaCyDefaultServlet.java b/source/net/yacy/http/servlets/YaCyDefaultServlet.java
index 10a97af9b..16bb22241 100644
--- a/source/net/yacy/http/servlets/YaCyDefaultServlet.java
+++ b/source/net/yacy/http/servlets/YaCyDefaultServlet.java
@@ -773,18 +773,21 @@ public class YaCyDefaultServlet extends HttpServlet {
protected Class<?> rewriteClass(String target) {
assert target.charAt(0) == '/';
+ final int p = target.lastIndexOf('.');
+ if (p < 0) {
+ return null;
+ }
+ target = "net.yacy.htroot" + target.substring(0, p).replace('/', '.');
try {
- final int p = target.lastIndexOf('.');
- if (p < 0) {
- return null;
- }
- target = "net.yacy.htroot" + target.substring(0, p).replace('/', '.');
-
final Class<?> servletClass = Class.forName(target);
-
return servletClass;
} catch (final ClassNotFoundException e) {
- return null;
+ try {
+ final Class<?> servletClass = Class.forName(target + "_"); // for some targets we need alternative names
+ return servletClass;
+ } catch (final ClassNotFoundException ee) {
+ return null;
+ }
}
}
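Note: after the move of the servlet sources under source/net/yacy/htroot, rewriteClass maps a request path such as /api/version.java to the class name net.yacy.htroot.api.version and, as the refactored hunk above shows, retries with a trailing underscore when that class is not found. A minimal sketch of just the string rewriting (the real method additionally calls Class.forName on the result):

public final class TargetNameSketch {

    /** Mirrors the name rewriting in rewriteClass: drop the file extension, turn path
     *  separators into package separators, prefix net.yacy.htroot; null if no extension. */
    static String classNameFor(final String target) {
        assert target.charAt(0) == '/';
        final int p = target.lastIndexOf('.');
        if (p < 0) return null;
        return "net.yacy.htroot" + target.substring(0, p).replace('/', '.');
    }

    public static void main(final String[] args) {
        // prints net.yacy.htroot.api.version; if Class.forName fails for that name,
        // the servlet retries with "net.yacy.htroot.api.version_" before returning null.
        System.out.println(classNameFor("/api/version.java"));
    }
}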