diff --git a/source/net/yacy/cora/date/ISO8601Formatter.java b/source/net/yacy/cora/date/ISO8601Formatter.java
index e57dfbfa6..7c74143f8 100644
--- a/source/net/yacy/cora/date/ISO8601Formatter.java
+++ b/source/net/yacy/cora/date/ISO8601Formatter.java
@@ -52,7 +52,7 @@ public class ISO8601Formatter extends AbstractFormatter implements DateFormatter
}
/**
- * Parse dates as defined in {@linkplain http://www.w3.org/TR/NOTE-datetime}.
+ * Parse dates as defined in http://www.w3.org/TR/NOTE-datetime.
* This format (also specified in ISO8601) allows different "precisions".
* The following lower precision versions for the complete date
* "2007-12-19T10:20:30.567+0300" are allowed:
diff --git a/source/net/yacy/cora/protocol/http/HTTPClient.java b/source/net/yacy/cora/protocol/http/HTTPClient.java
index 3a2c493e6..ff41aa5a6 100644
--- a/source/net/yacy/cora/protocol/http/HTTPClient.java
+++ b/source/net/yacy/cora/protocol/http/HTTPClient.java
@@ -100,7 +100,7 @@ import org.apache.http.util.EntityUtils;
/**
- * HttpClient implementation which uses HttpComponents Client {@link http://hc.apache.org/}
+ * HttpClient implementation which uses HttpComponents Client.
*
* @author sixcooler
*
diff --git a/source/net/yacy/data/list/ListAccumulator.java b/source/net/yacy/data/list/ListAccumulator.java
index 788084d3a..70e576749 100644
--- a/source/net/yacy/data/list/ListAccumulator.java
+++ b/source/net/yacy/data/list/ListAccumulator.java
@@ -65,7 +65,7 @@ public class ListAccumulator {
/**
* Adds a new entry to a list identified by a given name.
- * @param key The name of the list the entry is to be added to.
+ * @param list The name of the list the entry is to be added to.
* @param entry The new entry.
* @return True if the entry has been added, else false (if list does not exists).
*/
@@ -125,16 +125,14 @@ public class ListAccumulator {
}
/**
- * Returns a {@link List} which contains all the {@link List Lists} of entries.
- * @return list of lists.
+ * @return a {@link List} which contains all the {@link List Lists} of entries.
*/
public List> getEntryLists() {
return entries;
}
/**
- * Returns a {@link List} which contains all the {@link Map Maps} of entries.
- * @return
+ * @return a {@link List} which contains all the {@link Map Maps} of entries.
*/
public List
*
@@ -1847,10 +1847,6 @@ public final class Switchboard extends serverSwitch {
* will be returned
*
*
- * @see #CRAWL_PROFILE_PROXY hardcoded
- * @see #CRAWL_PROFILE_REMOTE hardcoded
- * @see #CRAWL_PROFILE_SNIPPET_TEXT hardcoded
- * @see #CRAWL_PROFILE_SNIPPET_MEDIA hardcoded
* @return whether this method has done something or not (i.e. because the queues have been filled or
* there are no profiles left to clean up)
* @throws InterruptedException if the current thread has been interrupted, i.e. by the shutdown
@@ -3319,12 +3315,12 @@ public final class Switchboard extends serverSwitch {
}
/**
- * load the content of a URL, parse the content and add the content to the index This process is started
+ * load the content of some URLs, parse the content and add the content to the index. This process is started
* concurrently. The method returns immediately after the call.
* Loaded/indexed pages are added to the given SearchEvent. If this is not required prefer addToCrawler
* to spare concurrent processes, bandwidth and intransparent crawl/load activity
*
- * @param url the url that shall be indexed
+ * @param urls the urls that shall be indexed
* @param searchEvent (optional) a search event that shall get results from the indexed pages directly
* feeded. If object is null then it is ignored
* @throws IOException
@@ -3413,11 +3409,11 @@ public final class Switchboard extends serverSwitch {
}
/**
- * add url to Crawler - which itself loads the URL, parses the content and adds it to the index
+ * add urls to Crawler - which itself loads the URLs, parses the content and adds it to the index
* transparent alternative to "addToIndex" including, double in crawler check, display in crawl monitor
* but doesn't return results for a ongoing search
*
- * @param url the url that shall be indexed
+ * @param urls the urls that shall be indexed
* @param asglobal true adds the url to global crawl queue (for remote crawling), false to the local crawler
*/
public void addToCrawler(final Collection urls, final boolean asglobal) {
diff --git a/source/net/yacy/search/SwitchboardConstants.java b/source/net/yacy/search/SwitchboardConstants.java
index 7e6cb7abd..ddc10283c 100644
--- a/source/net/yacy/search/SwitchboardConstants.java
+++ b/source/net/yacy/search/SwitchboardConstants.java
@@ -26,7 +26,7 @@
package net.yacy.search;
-import net.yacy.kelondro.util.MapTools;
+import net.yacy.cora.order.Digest;
import net.yacy.server.http.RobotsTxtConfig;
/**
@@ -39,7 +39,7 @@ public final class SwitchboardConstants {
/**
* public static final String ADMIN_ACCOUNT_B64MD5 = "adminAccountBase64MD5"
* Name of the setting holding the authentication hash for the static admin
-account. It is calculated
- * by first encoding username:password
as Base64 and hashing it using {@link MapTools#encodeMD5Hex(String)}.
+ * by first encoding username:password
as Base64 and hashing it using {@link Digest#encodeMD5Hex(String)}.
* With introduction of DIGEST authentication all passwords are MD5 encoded and calculatd as username:adminrealm:password
* To differentiate old and new admin passwords, use the new calculated passwords a "MD5:" prefix.
*/
@@ -101,7 +101,6 @@ public final class SwitchboardConstants {
* Name of the local crawler thread, popping one entry off the Local Crawl Queue, and passing it to the
* proxy cache enqueue thread to download and further process it
*
- * @see Switchboard#PROXY_CACHE_ENQUEUE
*/
public static final String CRAWLJOB_LOCAL_CRAWL = "50_localcrawl";
public static final String CRAWLJOB_LOCAL_CRAWL_METHOD_START = "coreCrawlJob";
@@ -126,7 +125,7 @@ public final class SwitchboardConstants {
* public static final String CRAWLJOB_REMOTE_CRAWL_LOADER = "60_remotecrawlloader"
* Name of the remote crawl list loading thread
*
- * @see Switchboard#CRAWLJOB_REMOTE_CRAWL_LOADER
+ * @see #CRAWLJOB_REMOTE_CRAWL_LOADER
*/
public static final String CRAWLJOB_REMOTE = "crawlResponse"; // enable/disable response to remote crawl requests
public static final String CRAWLJOB_REMOTE_CRAWL_LOADER = "60_remotecrawlloader";
@@ -211,7 +210,7 @@ public final class SwitchboardConstants {
* public static final String INDEX_DIST_ALLOW = "allowDistributeIndex"
* Name of the setting whether Index Distribution shall be allowed (and the DHT-thread therefore started) or not
*
- * @see Switchboard#INDEX_DIST_ALLOW_WHILE_CRAWLING
+ * @see #INDEX_DIST_ALLOW_WHILE_CRAWLING
*/
public static final String INDEX_DIST_ALLOW = "allowDistributeIndex";
public static final String INDEX_RECEIVE_ALLOW = "allowReceiveIndex";
@@ -224,7 +223,7 @@ public final class SwitchboardConstants {
* the Local Crawler Queue is filled.
* This setting only has effect if {@link #INDEX_DIST_ALLOW} is enabled
*
- * @see Switchboard#INDEX_DIST_ALLOW
+ * @see #INDEX_DIST_ALLOW
*/
public static final String INDEX_DIST_ALLOW_WHILE_CRAWLING = "allowDistributeIndexWhileCrawling";
public static final String INDEX_DIST_ALLOW_WHILE_INDEXING = "allowDistributeIndexWhileIndexing";
@@ -251,14 +250,6 @@ public final class SwitchboardConstants {
public static final String PROXY_INDEXING_LOCAL_TEXT = "proxyIndexingLocalText";
public static final String PROXY_INDEXING_LOCAL_MEDIA = "proxyIndexingLocalMedia";
public static final String PROXY_CACHE_SIZE = "proxyCacheSize";
- /**
- * public static final String PROXY_CACHE_LAYOUT = "proxyCacheLayout"
- * Name of the setting which file-/folder-layout the proxy cache shall use. Possible values are {@link #PROXY_CACHE_LAYOUT_TREE}
- * and {@link #PROXY_CACHE_LAYOUT_HASH}
- *
- * @see Switchboard#PROXY_CACHE_LAYOUT_TREE
- * @see Switchboard#PROXY_CACHE_LAYOUT_HASH
- */
public static final String PROXY_YACY_ONLY = "proxyYacyOnly";
public static final String PROXY_TRANSPARENT_PROXY = "isTransparentProxy";
@@ -360,8 +351,6 @@ public final class SwitchboardConstants {
* Name of the setting specifying the folder beginning from the YaCy-installation's top-folder, where all
* downloaded webpages and their respective ressources and HTTP-headers are stored. It is the location containing
* the proxy-cache
- *
- * @see Switchboard#PROXY_CACHE_LAYOUT for details on the file-layout in this path
*/
public static final String HTCACHE_PATH = "proxyCache";
public static final String HTCACHE_PATH_DEFAULT = "DATA/HTCACHE";
@@ -415,13 +404,6 @@ public final class SwitchboardConstants {
* Name of the setting specifying the folder beginning from the YaCy-installation's top-folder, where all
* DBs containing "work" of the user are saved. Such include bookmarks, messages, wiki, blog
*
- * @see Switchboard#DBFILE_BLOG
- * @see Switchboard#DBFILE_BOOKMARKS
- * @see Switchboard#DBFILE_BOOKMARKS_DATES
- * @see Switchboard#DBFILE_BOOKMARKS_TAGS
- * @see Switchboard#DBFILE_MESSAGE
- * @see Switchboard#DBFILE_WIKI
- * @see Switchboard#DBFILE_WIKI_BKP
*/
public static final String WORK_PATH = "workPath";
public static final String WORK_PATH_DEFAULT = "DATA/WORK";
diff --git a/source/net/yacy/search/index/Fulltext.java b/source/net/yacy/search/index/Fulltext.java
index 8a079764c..4cf3d9d1c 100644
--- a/source/net/yacy/search/index/Fulltext.java
+++ b/source/net/yacy/search/index/Fulltext.java
@@ -262,8 +262,7 @@ public final class Fulltext {
}
/**
- * get the size of the webgraph index
- * @return
+ * @return the size of the webgraph index
*/
public long webgraphSize() {
return this.writeWebgraph ? this.getWebgraphConnector().getSize() : 0;
@@ -390,7 +389,7 @@ public final class Fulltext {
/**
* using a fragment of the url hash (6 bytes: bytes 6 to 11) it is possible to address all urls from a specific domain
* here such a fragment can be used to delete all these domains at once
- * @param hosthash the hash of the host to be deleted
+ * @param hosthashes the hashes of the hosts to be deleted
* @param freshdate either NULL or a date in the past which is the limit for deletion. Only documents older than this date are deleted
* @throws IOException
*/
@@ -460,9 +459,8 @@ public final class Fulltext {
/**
* remove a full subpath from the index
- * @param subpath the left path of the url; at least until the end of the host
+ * @param basepath the left path of the url; at least until the end of the host
* @param freshdate either NULL or a date in the past which is the limit for deletion. Only documents older than this date are deleted
- * @param concurrently if true, then the method returnes immediately and runs concurrently
*/
public int remove(final String basepath, Date freshdate) {
DigestURL uri;
@@ -510,7 +508,7 @@ public final class Fulltext {
* Deletes document with id=urlHash from fulltext index and document with
* source_id_s=urlHash from webgraph index
* @param urlHash the document id
- * @return
+ * @return false
*/
public boolean remove(final byte[] urlHash) {
if (urlHash == null) return false;
@@ -570,7 +568,7 @@ public final class Fulltext {
/**
* create a dump file from the current solr directory
- * @return
+ * @return file reference to the dump
*/
public File dumpSolr() {
EmbeddedInstance esc = this.solrInstances.getEmbedded();
diff --git a/source/net/yacy/search/index/Segment.java b/source/net/yacy/search/index/Segment.java
index 831acbf24..40eacc2bb 100644
--- a/source/net/yacy/search/index/Segment.java
+++ b/source/net/yacy/search/index/Segment.java
@@ -126,10 +126,9 @@ public class Segment {
/**
* create a new Segment
- * @param log
- * @param segmentPath that should be the path ponting to the directory "SEGMENT"
- * @param collectionSchema
- * @throws IOException
+ * @param log logger instance
+ * @param segmentPath that should be the path pointing to the directory "SEGMENT"
+ * @throws IOException when an error occurs
*/
public Segment(final ConcurrentLog log, final File segmentPath, final File archivePath,
final CollectionConfiguration collectionConfiguration, final WebgraphConfiguration webgraphConfiguration) throws IOException {
@@ -399,7 +398,7 @@ public class Segment {
/**
* get the load time of a resource.
- * @param urlHash
+ * @param urlhash the resource hash
* @return the time in milliseconds since epoch for the load time or -1 if the document does not exist
*/
public long getLoadTime(final String urlhash) throws IOException {
diff --git a/source/net/yacy/search/navigator/LanguageNavigator.java b/source/net/yacy/search/navigator/LanguageNavigator.java
index 56cc44850..ffebc93eb 100644
--- a/source/net/yacy/search/navigator/LanguageNavigator.java
+++ b/source/net/yacy/search/navigator/LanguageNavigator.java
@@ -36,8 +36,7 @@ public class LanguageNavigator extends StringNavigator implements Navigator {
/**
* Default constructor, using the default YaCy Solr field language_s.
*
- * @param title
- * @param field the SolrDocument schema field containing language code
+ * @param title the navigator display name
*/
public LanguageNavigator(String title) {
super(title, CollectionSchema.language_s);
diff --git a/source/net/yacy/search/navigator/Navigator.java b/source/net/yacy/search/navigator/Navigator.java
index 1ba857e09..020bdcadc 100644
--- a/source/net/yacy/search/navigator/Navigator.java
+++ b/source/net/yacy/search/navigator/Navigator.java
@@ -80,7 +80,7 @@ public interface Navigator extends ScoreMap {
* The navigator looks for a field in the document and increases the counts
* depending on the value in the document field.
*
- * @param docs document
+ * @param doc document
*/
public void incDoc(URIMetadataNode doc);
diff --git a/source/net/yacy/search/navigator/RestrictedStringNavigator.java b/source/net/yacy/search/navigator/RestrictedStringNavigator.java
index af3a6aa12..d6d44a3bc 100644
--- a/source/net/yacy/search/navigator/RestrictedStringNavigator.java
+++ b/source/net/yacy/search/navigator/RestrictedStringNavigator.java
@@ -81,7 +81,6 @@ public class RestrictedStringNavigator extends StringNavigator implements Naviga
/**
* Increase counter if item allowed and not forbidden
- * @param key
*/
@Override
public void inc(ScoreMap map) {
diff --git a/source/net/yacy/search/navigator/YearNavigator.java b/source/net/yacy/search/navigator/YearNavigator.java
index 01567f5c4..5b74da9b8 100644
--- a/source/net/yacy/search/navigator/YearNavigator.java
+++ b/source/net/yacy/search/navigator/YearNavigator.java
@@ -148,9 +148,9 @@ public class YearNavigator extends StringNavigator implements Navigator {
/**
* For date_in_content_dts it return true if form:YEAR and to:YEAR is part
* of the modifier, otherwise false.
- * @param modifier
+ * @param modifier the search query modifier
* @param name 4 digit year string
- * @return
+ * @return true when the modifier is active
*/
@Override
public boolean modifieractive(QueryModifier modifier, String name) {
diff --git a/source/net/yacy/search/query/QueryGoal.java b/source/net/yacy/search/query/QueryGoal.java
index fb4eff8da..f20d8ffd1 100644
--- a/source/net/yacy/search/query/QueryGoal.java
+++ b/source/net/yacy/search/query/QueryGoal.java
@@ -203,7 +203,7 @@ public class QueryGoal {
* the modifier are held separately in a search paramter modifier
*
* @param encodeHTML
- * @return
+ * @return the search query string
*/
public String getQueryString(final boolean encodeHTML) {
if (this.query_original == null) return null;
diff --git a/source/net/yacy/search/query/QueryParams.java b/source/net/yacy/search/query/QueryParams.java
index 263fc2c8d..ed8455385 100644
--- a/source/net/yacy/search/query/QueryParams.java
+++ b/source/net/yacy/search/query/QueryParams.java
@@ -669,11 +669,7 @@ public final class QueryParams {
/**
* make a query anchor tag
- * @param page
- * @param theQuery
- * @param originalUrlMask
- * @param addToQuery
- * @return
+ * @return the anchor url builder
*/
public static StringBuilder navurl(final RequestHeader.FileType ext, final int page, final QueryParams theQuery, final String newQueryString, boolean newModifierReplacesOld) {
diff --git a/source/net/yacy/search/schema/CollectionConfiguration.java b/source/net/yacy/search/schema/CollectionConfiguration.java
index 8317e6729..93074d9b0 100644
--- a/source/net/yacy/search/schema/CollectionConfiguration.java
+++ b/source/net/yacy/search/schema/CollectionConfiguration.java
@@ -218,7 +218,6 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
* @param doc
* @param allAttr
* @param digestURL
- * @param doctype
* @return the normalized url
*/
public String addURIAttributes(final SolrInputDocument doc, final boolean allAttr, final DigestURL digestURL) {
@@ -2104,13 +2103,13 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
return a;
}
- /**
- * encode a string containing attributes from anchor rel properties binary:
- * bit 0: "me" contained in rel
- * bit 1: "nofollow" contained in rel
- * @param rel
- * @return binary encoded information about rel
- */
+// /**
+// * encode a string containing attributes from anchor rel properties binary:
+// * bit 0: "me" contained in rel
+// * bit 1: "nofollow" contained in rel
+// * @param rel
+// * @return binary encoded information about rel
+// */
/*
private static List relEval(final List rel) {
List il = new ArrayList(rel.size());
diff --git a/source/net/yacy/search/snippet/TextSnippet.java b/source/net/yacy/search/snippet/TextSnippet.java
index da230fb79..c04d73958 100644
--- a/source/net/yacy/search/snippet/TextSnippet.java
+++ b/source/net/yacy/search/snippet/TextSnippet.java
@@ -412,7 +412,7 @@ public class TextSnippet implements Comparable, Comparator, Comparator queryHashes) {