// plasmaCrawlZURL.java
// (C) 2007 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 15.03.2007 on http://www.anomic.de
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// $LastChangedDate: 2006-04-02 22:40:07 +0200 (So, 02 Apr 2006) $
// $LastChangedRevision: 1986 $
// $LastChangedBy: orbiter $
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

package de.anomic.crawler;

import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.Iterator;
import java.util.concurrent.ConcurrentLinkedQueue;

import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.index.ObjectIndex;
import net.yacy.kelondro.index.Row;
import net.yacy.kelondro.index.RowSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.order.Base64Order;
import net.yacy.kelondro.table.SplitTable;
import net.yacy.kelondro.table.Table;
import net.yacy.kelondro.util.FileUtils;

import de.anomic.crawler.retrieval.Request;
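
/**
 * ZURL records URLs whose load attempt did not succeed: for each URL it keeps
 * the URL hash, the executing peer, the time and count of load attempts, and a
 * textual failure cause, persisted as rows of {@code rowdef}. The hashes of the
 * most recent failures are additionally held on a bounded in-memory stack for
 * quick enumeration via {@link #iterator()}.
 *
 * A minimal usage sketch (the directory and table name below are hypothetical
 * examples, not taken from this file):
 *
 * <pre>
 *   ZURL errorLog = new ZURL(new File("DATA/INDEX"), "urlError.table", false, false, false);
 *   // after a failed fetch, record the request together with its failure cause:
 *   // errorLog.push(request, executorHash, new Date(), 1, "connection timeout");
 *   for (ZURL.Entry e : errorLog) System.out.println(e.url() + ": " + e.anycause());
 *   errorLog.close();
 * </pre>
 */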
public class ZURL implements Iterable<ZURL.Entry> {

    private static final int EcoFSBufferSize = 2000;
    private static final int maxStackSize = 1000;

    public final static Row rowdef = new Row(
            "String urlhash-" + Word.commonHashLength + ", " +  // the url's hash
            "String executor-" + Word.commonHashLength + ", " + // the crawling executor
            "Cardinal workdate-8 {b256}, " +                    // the time when the url was last tried to load
            "Cardinal workcount-4 {b256}, " +                   // number of load retries
            "String anycause-132, " +                           // string describing the load failure
            "byte[] entry-" + Request.rowdef.objectsize,        // extra space
            Base64Order.enhancedCoder
    );

    // the class objects
    private final ObjectIndex urlIndex;
    private final ConcurrentLinkedQueue<String> stack;
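
    /**
     * Creates a ZURL that is backed by a kelondro {@link Table} file named
     * tablename inside cachePath. If startWithEmptyFile is set, an existing
     * table file (or SplitTable directory) of that name is deleted first, so
     * the log starts empty.
     */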
    public ZURL(
            final File cachePath,
            final String tablename,
            final boolean startWithEmptyFile,
            final boolean useTailCache,
            final boolean exceed134217727) {
        // creates a new ZURL in a file
        cachePath.mkdirs();
        final File f = new File(cachePath, tablename);
        if (startWithEmptyFile) {
            if (f.exists()) {
                if (f.isDirectory()) SplitTable.delete(cachePath, tablename); else FileUtils.deletedelete(f);
            }
        }
        this.urlIndex = new Table(f, rowdef, EcoFSBufferSize, 0, useTailCache, exceed134217727);
        //urlIndex = new kelondroFlexTable(cachePath, tablename, -1, rowdef, 0, true);
        this.stack = new ConcurrentLinkedQueue<String>();
    }

    public ZURL() {
        // creates a new ZURL in RAM
        this.urlIndex = new RowSet(rowdef, 0);
        this.stack = new ConcurrentLinkedQueue<String>();
    }

    public int size() {
        return urlIndex.size();
    }

    public void clear() throws IOException {
        if (urlIndex != null) urlIndex.clear();
        if (stack != null) stack.clear();
    }

    public void close() {
        try {this.clear();} catch (IOException e) {}
        if (urlIndex != null) urlIndex.close();
    }

    public boolean remove(final String hash) {
        if (hash == null) return false;
        //System.out.println("*** DEBUG ZURL " + this.urlIndex.filename() + " remove " + hash);
        try {
            urlIndex.remove(hash.getBytes());
            return true;
        } catch (final IOException e) {
            return false;
        }
    }
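
    /**
     * Records a failed load attempt: wraps the request and its failure metadata
     * in an {@link Entry}, persists it to the urlIndex, and pushes the URL hash
     * onto the in-memory stack. The stack is trimmed to maxStackSize, so only
     * the most recent failures remain reachable through {@link #iterator()}.
     */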
    public void push(
            final Request bentry,
            final String executor,
            final Date workdate,
            final int workcount,
            String anycause) {
        assert executor != null;
        assert executor.length() > 0;
        if (anycause == null) anycause = "unknown";
        Entry entry = new Entry(bentry, executor, workdate, workcount, anycause);
        entry.store();
        stack.add(entry.hash());
        while (stack.size() > maxStackSize) stack.poll();
    }
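
    /**
     * Iterates over the most recently pushed entries, in the insertion order of
     * the bounded in-memory stack. Hashes are resolved lazily through
     * {@link #getEntry(String)}, so entries that have meanwhile been removed
     * from the index may be returned as null.
     */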
    public Iterator<ZURL.Entry> iterator() {
        return new EntryIterator();
    }

    private class EntryIterator implements Iterator<ZURL.Entry> {

        private Iterator<String> hi;

        public EntryIterator() {
            this.hi = stack.iterator();
        }

        public boolean hasNext() {
            return hi.hasNext();
        }

        public ZURL.Entry next() {
            return getEntry(hi.next());
        }

        public void remove() {
            hi.remove();
        }
    }
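
    /**
     * Looks up a failure entry by its URL hash. Returns null if the hash is not
     * present in the index or if reading the underlying table fails.
     */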
    public ZURL.Entry getEntry(final String urlhash) {
        try {
            if (urlIndex == null) return null;
            //System.out.println("*** DEBUG ZURL " + this.urlIndex.filename() + " get " + urlhash);
            final Row.Entry entry = urlIndex.get(urlhash.getBytes());
            if (entry == null) return null;
            return new Entry(entry);
        } catch (final IOException e) {
            Log.logException(e);
            return null;
        }
    }

    public boolean exists(final String urlHash) {
        return urlIndex.has(urlHash.getBytes());
    }

    public void clearStack() {
        stack.clear();
    }

    public int stackSize() {
        return stack.size();
    }
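
    /**
     * One failure record: the original balancer {@link Request} plus the
     * executing peer, the date and count of load attempts, and the failure
     * cause. Entries are serialized into rowdef rows by {@link #store()} and
     * can be reconstructed from such rows.
     */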
    public class Entry {

        Request bentry;                 // the balancer entry
        private final String executor;  // the crawling executor
        private final Date workdate;    // the time when the url was last tried to load
        private final int workcount;    // number of load retries
        private final String anycause;  // string describing the reason for the load failure
        private boolean stored;

        private Entry(
                final Request bentry,
                final String executor,
                final Date workdate,
                final int workcount,
                final String anycause) {
            // create a new entry
            assert bentry != null;
            assert executor != null;
            this.bentry = bentry;
            this.executor = executor;
            this.workdate = (workdate == null) ? new Date() : workdate;
            this.workcount = workcount;
            this.anycause = (anycause == null) ? "" : anycause;
            this.stored = false;
        }

        private Entry(final Row.Entry entry) throws IOException {
            assert (entry != null);
            this.executor = entry.getColString(1, "UTF-8");
            this.workdate = new Date(entry.getColLong(2));
            this.workcount = (int) entry.getColLong(3);
            this.anycause = entry.getColString(4, "UTF-8");
            this.bentry = new Request(Request.rowdef.newEntry(entry.getColBytes(5)));
            assert ((new String(entry.getColBytes(0))).equals(bentry.url().hash()));
            this.stored = true;
        }

        protected void store() {
            // stores the values from the object variables into the database
            if (this.stored) return;
            if (this.bentry == null) return;
            final Row.Entry newrow = rowdef.newEntry();
            newrow.setCol(0, this.bentry.url().hash().getBytes());
            newrow.setCol(1, this.executor.getBytes());
            newrow.setCol(2, this.workdate.getTime());
            newrow.setCol(3, this.workcount);
            newrow.setCol(4, this.anycause.getBytes());
            newrow.setCol(5, this.bentry.toRow().bytes());
            try {
                //System.out.println("*** DEBUG ZURL " + urlIndex.filename() + " store " + newrow.getColString(0, "UTF-8"));
                if (urlIndex != null) urlIndex.put(newrow);
                this.stored = true;
            } catch (final IOException e) {
                System.out.println("INTERNAL ERROR AT plasmaEURL:url2hash:" + e.toString());
            }
        }

        public DigestURI url() {
            return this.bentry.url();
        }

        public String initiator() {
            return this.bentry.initiator();
        }

        public String hash() {
            // return the url hash, based on the md5 algorithm
            // the result is a String of 12 bytes within a 72-bit space
            // (each byte has a 6-bit range)
            // that should be enough for all web pages in the world
            return this.bentry.url().hash();
        }

        public Date workdate() {
            return workdate;
        }

        public String executor() {
            // return the hash of the crawling executor
            return executor;
        }

        public String anycause() {
            return anycause;
        }
    }
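
    /**
     * Iterator over all entries persisted in the urlIndex (not only the recent
     * in-memory stack), in the order delivered by urlIndex.rows(up, firstHash).
     */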
    public class kiter implements Iterator<Entry> {
        // enumerates entry elements

        Iterator<Row.Entry> i;
        boolean error = false;

        public kiter(final boolean up, final String firstHash) throws IOException {
            i = urlIndex.rows(up, (firstHash == null) ? null : firstHash.getBytes());
            error = false;
        }

        public boolean hasNext() {
            if (error) return false;
            return i.hasNext();
        }

        public Entry next() throws RuntimeException {
            final Row.Entry e = i.next();
            if (e == null) return null;
            try {
                return new Entry(e);
            } catch (final IOException ex) {
                throw new RuntimeException("error '" + ex.getMessage() + "' for hash " + e.getColString(0, null));
            }
        }

        public void remove() {
            i.remove();
        }
    }

    public Iterator<Entry> entries(final boolean up, final String firstHash) throws IOException {
        // enumerates entry elements
        return new kiter(up, firstHash);
    }

}