diff --git a/source/net/yacy/cora/date/ISO8601Formatter.java b/source/net/yacy/cora/date/ISO8601Formatter.java index e57dfbfa6..7c74143f8 100644 --- a/source/net/yacy/cora/date/ISO8601Formatter.java +++ b/source/net/yacy/cora/date/ISO8601Formatter.java @@ -52,7 +52,7 @@ public class ISO8601Formatter extends AbstractFormatter implements DateFormatter } /** - * Parse dates as defined in {@linkplain http://www.w3.org/TR/NOTE-datetime}. + * Parse dates as defined in http://www.w3.org/TR/NOTE-datetime. * This format (also specified in ISO8601) allows different "precisions". * The following lower precision versions for the complete date * "2007-12-19T10:20:30.567+0300" are allowed:
diff --git a/source/net/yacy/cora/protocol/http/HTTPClient.java b/source/net/yacy/cora/protocol/http/HTTPClient.java index 3a2c493e6..ff41aa5a6 100644 --- a/source/net/yacy/cora/protocol/http/HTTPClient.java +++ b/source/net/yacy/cora/protocol/http/HTTPClient.java @@ -100,7 +100,7 @@ import org.apache.http.util.EntityUtils; /** - * HttpClient implementation which uses HttpComponents Client {@link http://hc.apache.org/} + * HttpClient implementation which uses HttpComponents Client. * * @author sixcooler * diff --git a/source/net/yacy/data/list/ListAccumulator.java b/source/net/yacy/data/list/ListAccumulator.java index 788084d3a..70e576749 100644 --- a/source/net/yacy/data/list/ListAccumulator.java +++ b/source/net/yacy/data/list/ListAccumulator.java @@ -65,7 +65,7 @@ public class ListAccumulator { /** * Adds a new entry to a list identified by a given name. - * @param key The name of the list the entry is to be added to. + * @param list The name of the list the entry is to be added to. * @param entry The new entry. * @return True if the entry has been added, else false (if list does not exists). */ @@ -125,16 +125,14 @@ public class ListAccumulator { } /** - * Returns a {@link List} which contains all the {@link List Lists} of entries. - * @return list of lists. + * @return a {@link List} which contains all the {@link List Lists} of entries. */ public List> getEntryLists() { return entries; } /** - * Returns a {@link List} which contains all the {@link Map Maps} of entries. - * @return + * @return a {@link List} which contains all the {@link Map Maps} of entries. 
*/ public List> getPropertyMaps() { return properties; diff --git a/source/net/yacy/data/list/XMLBlacklistImporter.java b/source/net/yacy/data/list/XMLBlacklistImporter.java index a05b9a5c6..68511e429 100644 --- a/source/net/yacy/data/list/XMLBlacklistImporter.java +++ b/source/net/yacy/data/list/XMLBlacklistImporter.java @@ -170,7 +170,7 @@ public class XMLBlacklistImporter extends DefaultHandler { * Writes characters to a String which might be used by endElement() later. * @param ch The characters. * @param start The start position in the character array. - * @param lengthThe number of characters to use from the character array. + * @param length The number of characters to use from the character array. * @throws org.xml.sax.SAXException */ @Override diff --git a/source/net/yacy/data/ymark/YMarkUtil.java b/source/net/yacy/data/ymark/YMarkUtil.java index b7c8b0d59..2d506f1d1 100644 --- a/source/net/yacy/data/ymark/YMarkUtil.java +++ b/source/net/yacy/data/ymark/YMarkUtil.java @@ -45,21 +45,21 @@ public class YMarkUtil { public final static Pattern FOLDERS_SEPARATOR_PATTERN = Pattern.compile(FOLDERS_SEPARATOR); /** - * conveniance function to generate url hashes for YMark bookmarks + * convenience function to generate url hashes for YMark bookmarks * @param url a string representation of a valid url * @return a byte[] hash for the input URL string * @throws MalformedURLException - * @see net.yacy.kelondro.data.meta.DigestURI.DigestURI(String url, byte[] hash).hash() + * @see {@link DigestURL#hash()} */ public final static byte[] getBookmarkId(String url) throws MalformedURLException { return (new DigestURL(url)).hash(); } /** - * conveniance function to generate word hashes for YMark tags and folders + * convenience function to generate word hashes for YMark tags and folders * @param key a tag or folder name * @return a byte[] hash for the input string - * @see net.yacy.kelondro.data.word.Word.word2hash(final String word) + * @see 
net.yacy.kelondro.data.word.Word#word2hash(String) */ public final static byte[] getKeyId(final String key) { return Word.word2hash(key.toLowerCase()); diff --git a/source/net/yacy/document/AbstractParser.java b/source/net/yacy/document/AbstractParser.java index b59fa1051..fc56784f1 100644 --- a/source/net/yacy/document/AbstractParser.java +++ b/source/net/yacy/document/AbstractParser.java @@ -76,8 +76,8 @@ public abstract class AbstractParser implements Parser { /** * check equivalence of parsers; this simply tests equality of parser names - * @param o - * @return + * @param o the object to check. Must be a {@link Parser} implementation. + * @return true when this parser is equivalent to o */ @Override public boolean equals(final Object o) { diff --git a/source/net/yacy/document/Document.java b/source/net/yacy/document/Document.java index 209d30f96..8da6a943f 100644 --- a/source/net/yacy/document/Document.java +++ b/source/net/yacy/document/Document.java @@ -765,7 +765,7 @@ dc_rights * This is similar to mergeDocuments but directly joins internal content variables, * uses less parsed details and keeps this documents crawl data (like crawldepth, lastmodified) * - * @see mergeDocuments() + * @see #mergeDocuments(DigestURL, String, Document[]) * @param docs to be included * @throws IOException */ diff --git a/source/net/yacy/document/LargeNumberCache.java b/source/net/yacy/document/LargeNumberCache.java index f9fe13ae3..ccba8593c 100644 --- a/source/net/yacy/document/LargeNumberCache.java +++ b/source/net/yacy/document/LargeNumberCache.java @@ -41,7 +41,7 @@ public class LargeNumberCache { * Returns a Integer instance representing the specified int value. 
* If a new Integer instance is not required, this method * should generally be used in preference to the constructor - * {@link #Integer(int)}, as this method is likely to yield + * {@link Integer#Integer(int)}, as this method is likely to yield * significantly better space and time performance by caching * frequently requested values. * diff --git a/source/net/yacy/document/LibraryProvider.java b/source/net/yacy/document/LibraryProvider.java index dde93b7f7..0c324d0ff 100644 --- a/source/net/yacy/document/LibraryProvider.java +++ b/source/net/yacy/document/LibraryProvider.java @@ -99,12 +99,11 @@ public class LibraryProvider { } /** - * initialize the LibraryProvider as static class. This assigns default paths, and initializes the + * Initialize the LibraryProvider as static class. This assigns default paths, and initializes the * dictionary classes Additionally, if default dictionaries are given in the source path, they are * translated into the input format inside the DATA/DICTIONARIES directory * - * @param pathToSource - * @param pathToDICTIONARIES + * @param rootPath parent path of dictionaries */ public static void initialize(final File rootPath) { dictSource = new File(rootPath, path_to_source_dictionaries); diff --git a/source/net/yacy/document/Parser.java b/source/net/yacy/document/Parser.java index 0b278ab69..cd9a352bc 100644 --- a/source/net/yacy/document/Parser.java +++ b/source/net/yacy/document/Parser.java @@ -74,8 +74,7 @@ public interface Parser { /** * check equivalence of parsers; this simply tests equality of parser names - * @param o - * @return + * @return true when this parser is equivalent to o */ @Override public boolean equals(Object o); diff --git a/source/net/yacy/document/TextParser.java b/source/net/yacy/document/TextParser.java index 8657071a5..a91dfeeba 100644 --- a/source/net/yacy/document/TextParser.java +++ b/source/net/yacy/document/TextParser.java @@ -433,7 +433,7 @@ public final class TextParser { /** * checks if the parser 
supports the given extension. It is not only checked if the parser can parse such files, * it is also checked if the extension is not included in the extension-deny list. - * @param extention + * @param ext extension name * @return an error if the extension is not supported, null otherwise */ public static String supportsExtension(final String ext) { @@ -448,9 +448,9 @@ public final class TextParser { } /** - * checks if the parser supports the given extension. It is not only checked if the parser can parse such files, + * checks if the parser supports the given extension or the file at the specified url. It is not only checked if the parser can parse such files, * it is also checked if the extension is not included in the extension-deny list. - * @param extention + * @param url url to check * @return an error if the extension is not supported, null otherwise */ public static String supportsExtension(final MultiProtocolURL url) { diff --git a/source/net/yacy/document/content/DCEntry.java b/source/net/yacy/document/content/DCEntry.java index 07b3f4560..ed4b33c6a 100644 --- a/source/net/yacy/document/content/DCEntry.java +++ b/source/net/yacy/document/content/DCEntry.java @@ -122,7 +122,7 @@ public class DCEntry extends MultiMapSolrParams { /** * get Identifier (url) (so far only used for surrogate processing) * @param useRelationAsAlternative true = take relation if no identifier resolves to url - * @return + * @return this entry identifier url */ public DigestURL getIdentifier(boolean useRelationAsAlternative) { // identifier may be included multiple times (with all kinds of syntax - example is from on record) diff --git a/source/net/yacy/document/importer/Importer.java b/source/net/yacy/document/importer/Importer.java index a1fc158e6..efcd6734e 100644 --- a/source/net/yacy/document/importer/Importer.java +++ b/source/net/yacy/document/importer/Importer.java @@ -30,21 +30,18 @@ public interface Importer extends Runnable { public int count(); /** - * return the 
number of articles per second - * @return + * @return the number of articles per second */ public int speed(); /** - * return the time this import is already running - * @return + * @return the time this import is already running */ public long runningTime(); /** - * return the remaining seconds for the completion of all records in milliseconds - * @return + * @return the remaining seconds for the completion of all records in milliseconds */ public long remainingTime(); diff --git a/source/net/yacy/document/importer/MediawikiImporter.java b/source/net/yacy/document/importer/MediawikiImporter.java index 465546e65..dface870e 100644 --- a/source/net/yacy/document/importer/MediawikiImporter.java +++ b/source/net/yacy/document/importer/MediawikiImporter.java @@ -123,8 +123,7 @@ public class MediawikiImporter extends Thread implements Importer { } /** - * return the number of articles per second - * @return + * @return the number of articles per second */ @Override public int speed() { @@ -133,8 +132,7 @@ public class MediawikiImporter extends Thread implements Importer { } /** - * return the remaining seconds for the completion of all records in milliseconds - * @return + * @return the remaining seconds for the completion of all records in milliseconds */ @Override public long remainingTime() { diff --git a/source/net/yacy/document/importer/ResumptionToken.java b/source/net/yacy/document/importer/ResumptionToken.java index 25075410d..855628267 100644 --- a/source/net/yacy/document/importer/ResumptionToken.java +++ b/source/net/yacy/document/importer/ResumptionToken.java @@ -88,8 +88,7 @@ public class ResumptionToken extends TreeMap { /** * compute a url that can be used to resume the retrieval from the OAI-PMH resource - * @param givenURL - * @return + * @return the computed url * @throws IOException in case that no follow-up url can be generated; i.e. 
if the expiration date is exceeded */ public DigestURL resumptionURL() throws IOException { @@ -149,10 +148,10 @@ public class ResumptionToken extends TreeMap { } /** - * an expiration date of a resumption token that addresses how long a cached set will - * stay in the cache of the oai-pmh server. See: - * http://www.openarchives.org/OAI/2.0/guidelines-repository.htm#CachedResultSet - * @return + * See: + * Implementation Guidelines for the Open Archives + * @return an expiration date of a resumption token that addresses how long a cached set will + * stay in the cache of the oai-pmh server */ public Date getExpirationDate() { final String d = get("expirationDate"); @@ -174,8 +173,8 @@ public class ResumptionToken extends TreeMap { * In other cases, it is permissible for repositories to revise * the estimate during a list request sequence. * An attribute according to - * http://www.openarchives.org/OAI/2.0/guidelines-repository.htm#completeListSize - * @return + * Implementation Guidelines for the Open Archives + * @return the completeListSize attribute */ public int getCompleteListSize() { final String t = get("completeListSize"); @@ -188,8 +187,8 @@ public class ResumptionToken extends TreeMap { * thus it is always "0" in the first incomplete list response. * It should only be specified if it is consistently used in all responses. 
* An attribute according to - * http://www.openarchives.org/OAI/2.0/guidelines-repository.htm#completeListSize - * @return + * Implementation Guidelines for the Open Archives + * @return the cursor attribute */ public int getCursor() { final String t = get("cursor"); @@ -198,10 +197,9 @@ public class ResumptionToken extends TreeMap { } /** - * get a token of the stateless transfer in case that no expiration date is given - * see: - * http://www.openarchives.org/OAI/2.0/guidelines-repository.htm#StateInResumptionToken - * @return + * See: + * Implementation Guidelines for the Open Archives + * @return a token of the stateless transfer in case that no expiration date is given */ public String getToken() { return get("token"); diff --git a/source/net/yacy/document/parser/apkParser.java b/source/net/yacy/document/parser/apkParser.java index 8dd5b0daf..f1d0b2129 100644 --- a/source/net/yacy/document/parser/apkParser.java +++ b/source/net/yacy/document/parser/apkParser.java @@ -376,9 +376,9 @@ public class apkParser extends AbstractParser implements Parser { * Return the string stored in StringTable format at offset strOff. * This offset points to the 16 bit string length, which * is followed by that number of 16 bit (Unicode) chars. - * @param arr - * @param strOff - * @return + * @param arr source byte array + * @param strOff offset position + * @return the computed string */ public String compXmlStringAt(byte[] arr, int strOff) { int strLen = arr[strOff + 1] << 8 & 0xff00 | arr[strOff] & 0xff; @@ -392,10 +392,9 @@ public class apkParser extends AbstractParser implements Parser { } /** - * Return value of a Little Endian 32 bit word from the byte array at offset off. - * @param arr - * @param off - * @return + * @param arr source byte array + * @param off byte array offset position + * @return value of a Little Endian 32 bit word from the byte array at offset off. 
*/ public int LEW(byte[] arr, int off) { return arr[off + 3] << 24 & 0xff000000 | arr[off + 2] << 16 & 0xff0000 | arr[off + 1] << 8 & 0xff00 | arr[off] & 0xFF; diff --git a/source/net/yacy/document/parser/docParser.java b/source/net/yacy/document/parser/docParser.java index eb832bfbd..2d3dd2bae 100644 --- a/source/net/yacy/document/parser/docParser.java +++ b/source/net/yacy/document/parser/docParser.java @@ -141,7 +141,7 @@ public class docParser extends AbstractParser implements Parser { * @param location * @param mimeType * @param poifs - * @return + * @return an array containing one Document * @throws net.yacy.document.Parser.Failure */ public Document[] parseOldWordDoc( diff --git a/source/net/yacy/document/parser/html/ContentScraper.java b/source/net/yacy/document/parser/html/ContentScraper.java index 15f3bace2..49207428b 100644 --- a/source/net/yacy/document/parser/html/ContentScraper.java +++ b/source/net/yacy/document/parser/html/ContentScraper.java @@ -217,8 +217,9 @@ public class ContentScraper extends AbstractScraper implements Scraper { /** * scrape a document * @param root the document root url - * @param maxLinks the maximum number of links to scapre - * @param classDetector a map from class names to vocabulary names to scrape content from the DOM with associated class name + * @param maxLinks the maximum number of links to scrape + * @param vocabularyScraper handles maps from class names to vocabulary names and from documents to a map from vocabularies to terms + * @param timezoneOffset local time zone offset */ @SuppressWarnings("unchecked") public ContentScraper(final DigestURL root, int maxLinks, final VocabularyScraper vocabularyScraper, int timezoneOffset) { diff --git a/source/net/yacy/document/parser/html/Evaluation.java b/source/net/yacy/document/parser/html/Evaluation.java index 190801502..7d2ba74c5 100644 --- a/source/net/yacy/document/parser/html/Evaluation.java +++ b/source/net/yacy/document/parser/html/Evaluation.java @@ -41,15 +41,15 
@@ import net.yacy.cora.util.ConcurrentLog; import net.yacy.kelondro.util.MemoryControl; -/* +/** * This class provides methods to use a pattern analysis for html files * The pattern analysis is generic and can be configured using a field-name/pattern property * configuration file. * Such a configuration file has names of the structure - * _ + * <subject-name>_<document-element> * and values are regular java expressions - * A html file is scanned for pattern matchings within a specific - * and if such a matching can be found then the is collected as + * A html file is scanned for pattern matchings within a specific <document-element> + * and if such a matching can be found then the <attribute-name> is collected as * subject for the scanned document * patternProperties files must have special file names where the file name * starts with the word "parser." and must end with ".properties" @@ -192,10 +192,10 @@ public class Evaluation { } /** - * calculate the scores for a model - * the scores is a attribute/count map which count how often a specific attribute was found + * Calculate the scores for a model. + * The scores is a attribute/count map which count how often a specific attribute was found * @param modelName - * @return + * @return the calculated scores */ public ClusteredScoreMap getScores(final String modelName) { return this.modelMap.get(modelName); diff --git a/source/net/yacy/document/parser/html/ImageEntry.java b/source/net/yacy/document/parser/html/ImageEntry.java index 4433cf0b5..39273f0c5 100644 --- a/source/net/yacy/document/parser/html/ImageEntry.java +++ b/source/net/yacy/document/parser/html/ImageEntry.java @@ -41,8 +41,6 @@ public class ImageEntry implements Comparable, Comparator - * Then it iterates through all existing {@link CrawlProfiles crawl profiles} and removes all profiles + * Then it iterates through all existing {@link CrawlProfile crawl profiles} and removes all profiles * which are not hard-coded. *

*

@@ -1847,10 +1847,6 @@ public final class Switchboard extends serverSwitch { * will be returned *

* - * @see #CRAWL_PROFILE_PROXY hardcoded - * @see #CRAWL_PROFILE_REMOTE hardcoded - * @see #CRAWL_PROFILE_SNIPPET_TEXT hardcoded - * @see #CRAWL_PROFILE_SNIPPET_MEDIA hardcoded * @return whether this method has done something or not (i.e. because the queues have been filled or * there are no profiles left to clean up) * @throws InterruptedException if the current thread has been interrupted, i.e. by the shutdown @@ -3319,12 +3315,12 @@ public final class Switchboard extends serverSwitch { } /** - * load the content of a URL, parse the content and add the content to the index This process is started + * load the content of some URLs, parse the content and add the content to the index This process is started * concurrently. The method returns immediately after the call. * Loaded/indexed pages are added to the given SearchEvent. If this is not required prefer addToCrawler * to spare concurrent processes, bandwidth and intransparent crawl/load activity * - * @param url the url that shall be indexed + * @param urls the urls that shall be indexed * @param searchEvent (optional) a search event that shall get results from the indexed pages directly * feeded. 
If object is null then it is ignored * @throws IOException @@ -3413,11 +3409,11 @@ public final class Switchboard extends serverSwitch { } /** - * add url to Crawler - which itself loads the URL, parses the content and adds it to the index + * add urls to Crawler - which itself loads the URL, parses the content and adds it to the index * transparent alternative to "addToIndex" including, double in crawler check, display in crawl monitor * but doesn't return results for a ongoing search * - * @param url the url that shall be indexed + * @param urls the urls that shall be indexed * @param asglobal true adds the url to global crawl queue (for remote crawling), false to the local crawler */ public void addToCrawler(final Collection urls, final boolean asglobal) { diff --git a/source/net/yacy/search/SwitchboardConstants.java b/source/net/yacy/search/SwitchboardConstants.java index 7e6cb7abd..ddc10283c 100644 --- a/source/net/yacy/search/SwitchboardConstants.java +++ b/source/net/yacy/search/SwitchboardConstants.java @@ -26,7 +26,7 @@ package net.yacy.search; -import net.yacy.kelondro.util.MapTools; +import net.yacy.cora.order.Digest; import net.yacy.server.http.RobotsTxtConfig; /** @@ -39,7 +39,7 @@ public final class SwitchboardConstants { /** *

public static final String ADMIN_ACCOUNT_B64MD5 = "adminAccountBase64MD5"

*

Name of the setting holding the authentication hash for the static admin-account. It is calculated - * by first encoding username:password as Base64 and hashing it using {@link MapTools#encodeMD5Hex(String)}.

+ * by first encoding username:password as Base64 and hashing it using {@link Digest#encodeMD5Hex(String)}.

* With introduction of DIGEST authentication all passwords are MD5 encoded and calculatd as username:adminrealm:password * To differentiate old and new admin passwords, use the new calculated passwords a "MD5:" prefix. */ @@ -101,7 +101,6 @@ public final class SwitchboardConstants { *

Name of the local crawler thread, popping one entry off the Local Crawl Queue, and passing it to the * proxy cache enqueue thread to download and further process it

* - * @see Switchboard#PROXY_CACHE_ENQUEUE */ public static final String CRAWLJOB_LOCAL_CRAWL = "50_localcrawl"; public static final String CRAWLJOB_LOCAL_CRAWL_METHOD_START = "coreCrawlJob"; @@ -126,7 +125,7 @@ public final class SwitchboardConstants { *

public static final String CRAWLJOB_REMOTE_CRAWL_LOADER = "60_remotecrawlloader"

*

Name of the remote crawl list loading thread

* - * @see Switchboard#CRAWLJOB_REMOTE_CRAWL_LOADER + * @see #CRAWLJOB_REMOTE_CRAWL_LOADER */ public static final String CRAWLJOB_REMOTE = "crawlResponse"; // enable/disable response to remote crawl requests public static final String CRAWLJOB_REMOTE_CRAWL_LOADER = "60_remotecrawlloader"; @@ -211,7 +210,7 @@ public final class SwitchboardConstants { *

public static final String INDEX_DIST_ALLOW = "allowDistributeIndex"

*

Name of the setting whether Index Distribution shall be allowed (and the DHT-thread therefore started) or not

* - * @see Switchboard#INDEX_DIST_ALLOW_WHILE_CRAWLING + * @see #INDEX_DIST_ALLOW_WHILE_CRAWLING */ public static final String INDEX_DIST_ALLOW = "allowDistributeIndex"; public static final String INDEX_RECEIVE_ALLOW = "allowReceiveIndex"; @@ -224,7 +223,7 @@ public final class SwitchboardConstants { * the Local Crawler Queue is filled.

*

This setting only has effect if {@link #INDEX_DIST_ALLOW} is enabled

* - * @see Switchboard#INDEX_DIST_ALLOW + * @see #INDEX_DIST_ALLOW */ public static final String INDEX_DIST_ALLOW_WHILE_CRAWLING = "allowDistributeIndexWhileCrawling"; public static final String INDEX_DIST_ALLOW_WHILE_INDEXING = "allowDistributeIndexWhileIndexing"; @@ -251,14 +250,6 @@ public final class SwitchboardConstants { public static final String PROXY_INDEXING_LOCAL_TEXT = "proxyIndexingLocalText"; public static final String PROXY_INDEXING_LOCAL_MEDIA = "proxyIndexingLocalMedia"; public static final String PROXY_CACHE_SIZE = "proxyCacheSize"; - /** - *

public static final String PROXY_CACHE_LAYOUT = "proxyCacheLayout"

- *

Name of the setting which file-/folder-layout the proxy cache shall use. Possible values are {@link #PROXY_CACHE_LAYOUT_TREE} - * and {@link #PROXY_CACHE_LAYOUT_HASH}

- * - * @see Switchboard#PROXY_CACHE_LAYOUT_TREE - * @see Switchboard#PROXY_CACHE_LAYOUT_HASH - */ public static final String PROXY_YACY_ONLY = "proxyYacyOnly"; public static final String PROXY_TRANSPARENT_PROXY = "isTransparentProxy"; @@ -360,8 +351,6 @@ public final class SwitchboardConstants { *

Name of the setting specifying the folder beginning from the YaCy-installation's top-folder, where all * downloaded webpages and their respective ressources and HTTP-headers are stored. It is the location containing * the proxy-cache

- * - * @see Switchboard#PROXY_CACHE_LAYOUT for details on the file-layout in this path */ public static final String HTCACHE_PATH = "proxyCache"; public static final String HTCACHE_PATH_DEFAULT = "DATA/HTCACHE"; @@ -415,13 +404,6 @@ public final class SwitchboardConstants { *

Name of the setting specifying the folder beginning from the YaCy-installation's top-folder, where all * DBs containing "work" of the user are saved. Such include bookmarks, messages, wiki, blog

* - * @see Switchboard#DBFILE_BLOG - * @see Switchboard#DBFILE_BOOKMARKS - * @see Switchboard#DBFILE_BOOKMARKS_DATES - * @see Switchboard#DBFILE_BOOKMARKS_TAGS - * @see Switchboard#DBFILE_MESSAGE - * @see Switchboard#DBFILE_WIKI - * @see Switchboard#DBFILE_WIKI_BKP */ public static final String WORK_PATH = "workPath"; public static final String WORK_PATH_DEFAULT = "DATA/WORK"; diff --git a/source/net/yacy/search/index/Fulltext.java b/source/net/yacy/search/index/Fulltext.java index 8a079764c..4cf3d9d1c 100644 --- a/source/net/yacy/search/index/Fulltext.java +++ b/source/net/yacy/search/index/Fulltext.java @@ -262,8 +262,7 @@ public final class Fulltext { } /** - * get the size of the webgraph index - * @return + * @return the size of the webgraph index */ public long webgraphSize() { return this.writeWebgraph ? this.getWebgraphConnector().getSize() : 0; @@ -390,7 +389,7 @@ public final class Fulltext { /** * using a fragment of the url hash (6 bytes: bytes 6 to 11) it is possible to address all urls from a specific domain * here such a fragment can be used to delete all these domains at once - * @param hosthash the hash of the host to be deleted + * @param hosthashes the hashes of the hosts to be deleted * @param freshdate either NULL or a date in the past which is the limit for deletion. Only documents older than this date are deleted * @throws IOException */ @@ -460,9 +459,8 @@ public final class Fulltext { /** * remove a full subpath from the index - * @param subpath the left path of the url; at least until the end of the host + * @param basepath the left path of the url; at least until the end of the host * @param freshdate either NULL or a date in the past which is the limit for deletion. 
Only documents older than this date are deleted - * @param concurrently if true, then the method returnes immediately and runs concurrently */ public int remove(final String basepath, Date freshdate) { DigestURL uri; @@ -510,7 +508,7 @@ public final class Fulltext { * Deletes document with id=urlHash from fulltext index and document with * source_id_s=urlHash from webgraph index * @param urlHash the document id - * @return + * @return false */ public boolean remove(final byte[] urlHash) { if (urlHash == null) return false; @@ -570,7 +568,7 @@ public final class Fulltext { /** * create a dump file from the current solr directory - * @return + * @return file reference to the dump */ public File dumpSolr() { EmbeddedInstance esc = this.solrInstances.getEmbedded(); diff --git a/source/net/yacy/search/index/Segment.java b/source/net/yacy/search/index/Segment.java index 831acbf24..40eacc2bb 100644 --- a/source/net/yacy/search/index/Segment.java +++ b/source/net/yacy/search/index/Segment.java @@ -126,10 +126,9 @@ public class Segment { /** * create a new Segment - * @param log - * @param segmentPath that should be the path ponting to the directory "SEGMENT" - * @param collectionSchema - * @throws IOException + * @param log logger instance + * @param segmentPath that should be the path pointing to the directory "SEGMENT" + * @throws IOException when an error occurs */ public Segment(final ConcurrentLog log, final File segmentPath, final File archivePath, final CollectionConfiguration collectionConfiguration, final WebgraphConfiguration webgraphConfiguration) throws IOException { @@ -399,7 +398,7 @@ public class Segment { /** * get the load time of a resource. 
- * @param urlHash + * @param urlhash the resource hash * @return the time in milliseconds since epoch for the load time or -1 if the document does not exist */ public long getLoadTime(final String urlhash) throws IOException { diff --git a/source/net/yacy/search/navigator/LanguageNavigator.java b/source/net/yacy/search/navigator/LanguageNavigator.java index 56cc44850..ffebc93eb 100644 --- a/source/net/yacy/search/navigator/LanguageNavigator.java +++ b/source/net/yacy/search/navigator/LanguageNavigator.java @@ -36,8 +36,7 @@ public class LanguageNavigator extends StringNavigator implements Navigator { /** * Default constructor, using the default YaCy Solr field language_s. * - * @param title - * @param field the SolrDocument schema field containing language code + * @param title the navigator display name */ public LanguageNavigator(String title) { super(title, CollectionSchema.language_s); diff --git a/source/net/yacy/search/navigator/Navigator.java b/source/net/yacy/search/navigator/Navigator.java index 1ba857e09..020bdcadc 100644 --- a/source/net/yacy/search/navigator/Navigator.java +++ b/source/net/yacy/search/navigator/Navigator.java @@ -80,7 +80,7 @@ public interface Navigator extends ScoreMap { * The navigator looks for a field in the document and increases the counts * depending on the value in the document field. 
* - * @param docs document + * @param doc document */ public void incDoc(URIMetadataNode doc); diff --git a/source/net/yacy/search/navigator/RestrictedStringNavigator.java b/source/net/yacy/search/navigator/RestrictedStringNavigator.java index af3a6aa12..d6d44a3bc 100644 --- a/source/net/yacy/search/navigator/RestrictedStringNavigator.java +++ b/source/net/yacy/search/navigator/RestrictedStringNavigator.java @@ -81,7 +81,6 @@ public class RestrictedStringNavigator extends StringNavigator implements Naviga /** * Increase counter if item allowed and not forbidden - * @param key */ @Override public void inc(ScoreMap map) { diff --git a/source/net/yacy/search/navigator/YearNavigator.java b/source/net/yacy/search/navigator/YearNavigator.java index 01567f5c4..5b74da9b8 100644 --- a/source/net/yacy/search/navigator/YearNavigator.java +++ b/source/net/yacy/search/navigator/YearNavigator.java @@ -148,9 +148,9 @@ public class YearNavigator extends StringNavigator implements Navigator { /** * For date_in_content_dts it return true if form:YEAR and to:YEAR is part * of the modifier, otherwise false. 
- * @param modifier + * @param modifier the search query modifier * @param name 4 digit year string - * @return + * @return true when the modifier is active */ @Override public boolean modifieractive(QueryModifier modifier, String name) { diff --git a/source/net/yacy/search/query/QueryGoal.java b/source/net/yacy/search/query/QueryGoal.java index fb4eff8da..f20d8ffd1 100644 --- a/source/net/yacy/search/query/QueryGoal.java +++ b/source/net/yacy/search/query/QueryGoal.java @@ -203,7 +203,7 @@ public class QueryGoal { * the modifier are held separately in a search paramter modifier * * @param encodeHTML - * @return + * @return the search query string */ public String getQueryString(final boolean encodeHTML) { if (this.query_original == null) return null; diff --git a/source/net/yacy/search/query/QueryParams.java b/source/net/yacy/search/query/QueryParams.java index 263fc2c8d..ed8455385 100644 --- a/source/net/yacy/search/query/QueryParams.java +++ b/source/net/yacy/search/query/QueryParams.java @@ -669,11 +669,7 @@ public final class QueryParams { /** * make a query anchor tag - * @param page - * @param theQuery - * @param originalUrlMask - * @param addToQuery - * @return + * @return the anchor url builder */ public static StringBuilder navurl(final RequestHeader.FileType ext, final int page, final QueryParams theQuery, final String newQueryString, boolean newModifierReplacesOld) { diff --git a/source/net/yacy/search/schema/CollectionConfiguration.java b/source/net/yacy/search/schema/CollectionConfiguration.java index 8317e6729..93074d9b0 100644 --- a/source/net/yacy/search/schema/CollectionConfiguration.java +++ b/source/net/yacy/search/schema/CollectionConfiguration.java @@ -218,7 +218,6 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri * @param doc * @param allAttr * @param digestURL - * @param doctype * @return the normalized url */ public String addURIAttributes(final SolrInputDocument doc, final boolean allAttr, final 
DigestURL digestURL) { @@ -2104,13 +2103,13 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri return a; } - /** - * encode a string containing attributes from anchor rel properties binary: - * bit 0: "me" contained in rel - * bit 1: "nofollow" contained in rel - * @param rel - * @return binary encoded information about rel - */ +// /** +// * encode a string containing attributes from anchor rel properties binary: +// * bit 0: "me" contained in rel +// * bit 1: "nofollow" contained in rel +// * @param rel +// * @return binary encoded information about rel +// */ /* private static List relEval(final List rel) { List il = new ArrayList(rel.size()); diff --git a/source/net/yacy/search/snippet/TextSnippet.java b/source/net/yacy/search/snippet/TextSnippet.java index da230fb79..c04d73958 100644 --- a/source/net/yacy/search/snippet/TextSnippet.java +++ b/source/net/yacy/search/snippet/TextSnippet.java @@ -412,7 +412,7 @@ public class TextSnippet implements Comparable, Comparator, Comparator queryHashes) {