// plasmaCrawlLURL.java
// -----------------------
// part of YaCy
// (C) by Michael Peter Christen; mc@anomic.de
// first published on http://www.anomic.de
// Frankfurt, Germany, 2004
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// Using this software in any meaning (reading, learning, copying, compiling,
// running) means that you agree that the Author(s) is (are) not responsible
// for cost, loss of data or any harm that may be caused directly or indirectly
// by usage of this software or this documentation. The usage of this software
// is on your own risk. The installation and usage (starting/running) of this
// software may allow other people or application to access your computer and
// any attached devices and is highly dependent on the configuration of the
// software which must be done by the user of the software; the author(s) is
// (are) also not responsible for proper configuration and usage of the
// software, even if provoked by documentation provided together with
// the software.
//
// Any changes to this file according to the GPL as documented in the file
// gpl.txt aside this file in the shipment you received can be done to the
// lines that follow this copyright notice here, but changes must not be
// done inside the copyright notice above. A re-distribution must contain
// the intact and unchanged copyright notice.
// Contributions and changes to the program code must be marked as such.
/*
This class provides storage functions for the plasma search engine.
- the url-specific properties, including condenser results
- the text content of the url
Both entities are accessed with a hash, which is based on the MD5
algorithm. The MD5 is not encoded as a hex value, but a b64 value.
*/
package de.anomic.plasma;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.Locale;
import java.util.Properties;
import de.anomic.http.httpc;
import de.anomic.kelondro.kelondroTree;
import de.anomic.server.serverCodings;
import de.anomic.server.serverObjects;
import de.anomic.server.logging.serverLog;
import de.anomic.tools.crypt;
import de.anomic.yacy.yacyCore;
import de.anomic.yacy.yacySeed;
// Storage front-end for the loaded-URL database of the plasma search engine.
// Extends plasmaURL, which supplies the url hash cache (urlHashCache) and the
// column-length constants used below.
public final class plasmaCrawlLURL extends plasmaURL {

    // result stacks;
    // each stack entry is a single String of the concatenated form
    //   urlHash + initiatorHash + executorHash
    // (each part is urlHashLength characters wide; see getUrlHash/getInitiatorHash/getExecutorHash)
    private final LinkedList externResultStack; // 1 - remote index: retrieved by other peer
    private final LinkedList searchResultStack; // 2 - partly remote/local index: result of search queries
    private final LinkedList transfResultStack; // 3 - partly remote/local index: result of index transfer
    private final LinkedList proxyResultStack;  // 4 - local index: result of proxy fetch/prefetch
    private final LinkedList lcrawlResultStack; // 5 - local index: result of local crawling
    private final LinkedList gcrawlResultStack; // 6 - local index: triggered external
/**
 * Opens (or creates) the loaded-URL database file and initializes the
 * six result stacks.
 *
 * @param cachePath location of the kelondro tree file holding the URL entries
 * @param bufferkb  database cache size in kilobytes
 * @throws IOException if the database file cannot be opened or created
 */
public plasmaCrawlLURL(File cachePath, int bufferkb) throws IOException {
    super();
    if (cachePath.exists()) {
        // open existing cache; the column layout is already stored in the file
        urlHashCache = new kelondroTree(cachePath, bufferkb * 0x400);
    } else {
        // create a new cache with the per-entry column layout
        // (the former unused "segmentsize" sum of these widths was removed as dead code)
        int[] ce = {
            urlHashLength,
            urlStringLength,
            urlDescrLength,
            urlDateLength,
            urlDateLength,
            urlHashLength,
            urlCopyCountLength,
            urlFlagLength,
            urlQualityLength,
            urlLanguageLength,
            urlDoctypeLength,
            urlSizeLength,
            urlWordCountLength
        };
        cachePath.getParentFile().mkdirs();
        urlHashCache = new kelondroTree(cachePath, bufferkb * 0x400, ce);
    }
    // init result stacks
    externResultStack = new LinkedList();
    searchResultStack = new LinkedList();
    transfResultStack = new LinkedList();
    proxyResultStack = new LinkedList();
    lcrawlResultStack = new LinkedList();
    gcrawlResultStack = new LinkedList();
}
/**
 * Creates a new Entry from the given URL properties and pushes a record
 * (urlHash + initiatorHash + executorHash) onto the result stack selected
 * by stackType; stackType 0 (and any unknown id) records on no stack.
 * Null initiator/executor hashes are replaced by dummyHash.
 *
 * @return the newly created Entry
 */
public synchronized Entry addEntry(URL url, String descr, Date moddate, Date loaddate,
                                   String initiatorHash, String executorHash,
                                   String referrerHash, int copyCount, boolean localNeed,
                                   int quality, String language, char doctype,
                                   long size, int wordCount, int stackType) {
    final Entry e = new Entry(url, descr, moddate, loaddate, referrerHash, copyCount, localNeed, quality, language, doctype, size, wordCount);
    final String initiator = (initiatorHash == null) ? dummyHash : initiatorHash;
    final String executor = (executorHash == null) ? dummyHash : executorHash;
    final String stackEntry = e.urlHash + initiator + executor;
    switch (stackType) {
        case 1: externResultStack.add(stackEntry); break;
        case 2: searchResultStack.add(stackEntry); break;
        case 3: transfResultStack.add(stackEntry); break;
        case 4: proxyResultStack.add(stackEntry); break;
        case 5: lcrawlResultStack.add(stackEntry); break;
        case 6: gcrawlResultStack.add(stackEntry); break;
        default: break; // 0 or unknown: do not stack
    }
    return e;
}
/**
 * Pushes a record for an existing Entry (urlHash + initiatorHash +
 * executorHash) onto the result stack selected by stackType; stackType 0
 * records on no stack. Null initiator/executor hashes are replaced by
 * dummyHash.
 *
 * @return the entry passed in, or null if e was null or stacking failed
 */
public synchronized Entry addEntry(Entry e, String initiatorHash, String executorHash, int stackType) {
    if (e == null) { return null; }
    try {
        if (initiatorHash == null) { initiatorHash = dummyHash; }
        if (executorHash == null) { executorHash = dummyHash; }
        switch (stackType) {
            case 0: break;
            case 1: externResultStack.add(e.urlHash + initiatorHash + executorHash); break;
            case 2: searchResultStack.add(e.urlHash + initiatorHash + executorHash); break;
            case 3: transfResultStack.add(e.urlHash + initiatorHash + executorHash); break;
            case 4: proxyResultStack.add(e.urlHash + initiatorHash + executorHash); break;
            case 5: lcrawlResultStack.add(e.urlHash + initiatorHash + executorHash); break;
            case 6: gcrawlResultStack.add(e.urlHash + initiatorHash + executorHash); break;
        }
        return e;
    } catch (Exception ex) {
        // fixed: the message previously reported the wrong method name ("newEntry/2")
        System.out.println("INTERNAL ERROR in addEntry/2: " + ex.toString());
        return null;
    }
}
/**
 * Records a triggered global crawl on the gcrawl result stack (stack 6).
 * The record is the usual concatenation urlHash + initiatorHash + executorHash.
 */
public void notifyGCrawl(String urlHash, String initiatorHash, String executorHash) {
    final String record = urlHash + initiatorHash + executorHash;
    gcrawlResultStack.add(record);
}
/**
 * Loads the Entry stored under the given url hash from the url database.
 */
public synchronized Entry getEntry(String hash) {
    final Entry stored = new Entry(hash);
    return stored;
}
/**
 * Creates a fresh Entry carrying exactly the same properties as the given
 * one (url, description, dates, referrer, copy count, local flag, quality,
 * language, doctype, size and word count).
 *
 * @param oldEntry entry to duplicate; may be null
 * @return the duplicate, or null if oldEntry was null
 */
public synchronized Entry newEntry(Entry oldEntry) {
    if (oldEntry == null) { return null; }
    return new Entry(oldEntry.url(),
                     oldEntry.descr(),
                     oldEntry.moddate(),
                     oldEntry.loaddate(),
                     oldEntry.referrerHash(),
                     oldEntry.copyCount(),
                     oldEntry.local(),
                     oldEntry.quality(),
                     oldEntry.language(),
                     oldEntry.doctype(),
                     oldEntry.size(),
                     oldEntry.wordCount());
}
/**
 * Parses an Entry from its single-line serialization of the form
 * "{key=value,key=value,...}" (as produced by Entry.toString()).
 *
 * @param propStr   serialized properties, enclosed in '{' and '}'; may be null
 * @param setGlobal passed through to the Entry constructor
 * @return the parsed Entry, or null if propStr is null or not of the expected form
 */
public synchronized Entry newEntry(String propStr, boolean setGlobal) {
    if (propStr == null) { return null; } // guard: previously threw a NullPointerException
    if (propStr.startsWith("{") && propStr.endsWith("}")) {
        return new Entry(serverCodings.s2p(propStr.substring(1, propStr.length() - 1)), setGlobal);
    } else {
        return null;
    }
}
/**
 * @param stack result stack id (1..6)
 * @return number of records on that stack, or -1 for an unknown stack id
 */
public int getStackSize(int stack) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    return (resultStack == null) ? -1 : resultStack.size();
}
/**
 * Extracts the url hash (first urlHashLength characters) from the record at
 * position pos of the given result stack.
 *
 * @return the url hash, or null for an unknown stack id
 */
public String getUrlHash(int stack, int pos) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    if (resultStack == null) { return null; }
    return ((String) resultStack.get(pos)).substring(0, urlHashLength);
}
/**
 * Extracts the initiator hash (second urlHashLength-wide field) from the
 * record at position pos of the given result stack.
 *
 * @return the initiator hash, or null for an unknown stack id
 */
public String getInitiatorHash(int stack, int pos) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    if (resultStack == null) { return null; }
    return ((String) resultStack.get(pos)).substring(urlHashLength, urlHashLength * 2);
}
/**
 * Extracts the executor hash (third urlHashLength-wide field) from the
 * record at position pos of the given result stack.
 *
 * @return the executor hash, or null for an unknown stack id
 */
public String getExecutorHash(int stack, int pos) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    if (resultStack == null) { return null; }
    return ((String) resultStack.get(pos)).substring(urlHashLength * 2, urlHashLength * 3);
}
/**
 * Removes the record at position pos from the given result stack.
 *
 * @return true if a record was removed, false for an unknown stack id
 */
public boolean removeStack(int stack, int pos) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    if (resultStack == null) { return false; }
    return resultStack.remove(pos) != null;
}
/**
 * Empties the given result stack; unknown stack ids are ignored.
 */
public void clearStack(int stack) {
    LinkedList resultStack = null;
    switch (stack) {
        case 1: resultStack = externResultStack; break;
        case 2: resultStack = searchResultStack; break;
        case 3: resultStack = transfResultStack; break;
        case 4: resultStack = proxyResultStack; break;
        case 5: resultStack = lcrawlResultStack; break;
        case 6: resultStack = gcrawlResultStack; break;
    }
    if (resultStack != null) { resultStack.clear(); }
}
/**
 * Removes the entry with the given url hash from the url database (via
 * super.remove) and from the result stacks.
 *
 * NOTE(review): the search stops at the first stack position that matches,
 * so the hash may remain on other stacks — confirm this is intended.
 *
 * @return true if the hash was removed from the database or a stack
 */
public boolean remove(String urlHash) {
    boolean removed = super.remove(urlHash);
    for (int stack = 1; stack <= 6; stack++) {
        // scan backwards so positions stay valid while sizes shrink
        for (int i = getStackSize(stack) - 1; i >= 0; i--) {
            if (getUrlHash(stack, i).equals(urlHash)) {
                final boolean removedFromStack = removeStack(stack, i);
                return removed || removedFromStack;
            }
        }
    }
    return removed;
}
// Formats dates as "yyyy/MM/dd". SimpleDateFormat is NOT thread-safe, so all
// access goes through daydate(), which synchronizes on the formatter; this
// class is used from multiple threads (its public methods are synchronized).
private static final SimpleDateFormat dayFormatter = new SimpleDateFormat("yyyy/MM/dd", Locale.US);

/**
 * @param date date to format; may be null
 * @return the date as "yyyy/MM/dd", or the empty string if date is null
 */
private static String daydate(Date date) {
    if (date == null) {
        return "";
    }
    synchronized (dayFormatter) { // guard the shared, stateful formatter
        return dayFormatter.format(date);
    }
}
/**
 * Serializes all properties of this Entry into a single line of the form
 * "{key=value,key=value,...}".
 *
 * Example return value:
 * {hash=jmqfMk7Y3NKw,referrer=------------,mod=20050610,load=20051003,size=51666,wc=1392,cc=0,local=true,q=AEn,dt=h,lang=uk,url=b|aHR0cDovL3d3dy50cmFuc3BhcmVuY3kub3JnL3N1cnZleXMv,descr=b|S25vd2xlZGdlIENlbnRyZTogQ29ycnVwdGlvbiBTdXJ2ZXlzIGFuZCBJbmRpY2Vz}
 *
 * @return the serialized entry, or null if the property list could not be built
 */
public String toString() {
    final StringBuffer core = corePropList();
    if (core == null) return null;
    core.insert(0, "{");
    core.append("}");
    return core.toString();
    //return "{" + core + "}";
}

/** Prints a human-readable dump of this entry to stdout (debugging aid). */
public void print() {
    System.out.println("URL : " + url);
    System.out.println("Description : " + descr);
    System.out.println("Modified : " + httpc.dateString(moddate));
    System.out.println("Loaded : " + httpc.dateString(loaddate));
    System.out.println("Size : " + size + " bytes, " + wordCount + " words");
    System.out.println("Referrer Hash : " + referrerHash);
    System.out.println("Quality : " + quality);
    System.out.println("Language : " + language);
    System.out.println("DocType : " + doctype);
    System.out.println();
}

} // class Entry

/**
 * Enumeration over all Entry objects stored in the url hash cache.
 */
public class kenum implements Enumeration {
    // underlying iterator over the rows of the kelondro tree
    kelondroTree.rowIterator i;

    public kenum(boolean up, boolean rotating) throws IOException {
        i = urlHashCache.rows(up, rotating);
    }

    public boolean hasMoreElements() {
        return i.hasNext();
    }

    public Object nextElement() {
        // column 0 of each row holds the url hash; the Entry is built from it
        return new Entry(new String(((byte[][]) i.next())[0]));
    }
}

/**
 * Enumerates all Entry objects of the url database.
 */
public Enumeration elements(boolean up, boolean rotating) throws IOException {
    return new kenum(up, rotating);
}

/**
 * Debugging main:
 *   "-h <url>"  prints the url hash of the given url,
 *   "-l <path>" prints all entries of the URL cache at the given path.
 */
public static void main(String[] args) {
    if (args[0].equals("-h")) try {
        // arg 1 is url
        System.out.println("HASH: " + urlHash(new URL(args[1])));
    } catch (MalformedURLException e) { /* invalid url: print nothing */ }
    if (args[0].equals("-l")) try {
        // arg 1 is path to URLCache
        final plasmaCrawlLURL urls = new plasmaCrawlLURL(new File(args[1]), 1);
        final Enumeration enu = urls.elements(true, false);
        while (enu.hasMoreElements()) {
            ((Entry) enu.nextElement()).print();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
}