// Fulltext.java
// (C) 2006 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
// first published 2006 as part of 'plasmaCrawlLURL.java' on http://yacy.net
//
// This is a part of YaCy, a peer-to-peer based web search engine
//
// LICENSE
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package net.yacy.search.index;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.MalformedURLException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;

import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.date.ISO8601Formatter;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.federate.solr.YaCySchema;
import net.yacy.cora.federate.solr.connector.AbstractSolrConnector;
import net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector;
import net.yacy.cora.federate.solr.connector.MirrorSolrConnector;
import net.yacy.cora.federate.solr.connector.SolrConnector;
import net.yacy.cora.order.CloneableIterator;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.sorting.ScoreMap;
import net.yacy.cora.storage.ZIPReader;
import net.yacy.cora.storage.ZIPWriter;
import net.yacy.document.parser.html.CharacterCoding;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.data.word.WordReference;
import net.yacy.kelondro.index.Cache;
import net.yacy.kelondro.index.Index;
import net.yacy.kelondro.index.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.table.SplitTable;
import net.yacy.kelondro.util.MemoryControl;
import net.yacy.search.Switchboard;

import org.apache.commons.httpclient.util.DateUtil;
import org.apache.lucene.util.Version;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;

public final class Fulltext {
    private static final String SOLR_PATH = "solr_40"; // the number should be identical to the number in the property luceneMatchVersion in solrconfig.xml
    private static final String[] SOLR_OLD_PATH = new String[]{"solr_36"};

    // class objects
    private final File location;
    private Index urlIndexFile;
    private Export exportthread; // will have an export thread assigned if an exporter is running
    private String tablename;
    private ArrayList<HostStat> statsDump;
    private final MirrorSolrConnector solr;
    private final SolrConfiguration solrScheme;

    protected Fulltext(final File path, final SolrConfiguration solrScheme) {
        this.location = path;
        this.tablename = null;
        this.urlIndexFile = null;
        this.exportthread = null; // will have an export thread assigned if an exporter is running
        this.statsDump = null;
        this.solr = new MirrorSolrConnector(10000, 10000, 100);
        this.solrScheme = solrScheme;
    }
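
    // Wiring sketch (illustrative only, names assumed): a caller inside this
    // package constructs the Fulltext and attaches the embedded Solr core
    // before issuing queries; the commit-within value is an assumed example.
    //
    //   Fulltext fulltext = new Fulltext(segmentPath, solrConfiguration);
    //   fulltext.connectLocalSolr(180000);            // flush additions within 3 minutes
    //   SolrConnector connector = fulltext.getSolr(); // mirror of local and remote cores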
    /**
     * @deprecated used only for migration
     * @return the connected URLDb
     */
    @Deprecated
    public Index getURLDb() {
        return this.urlIndexFile;
    }

    /**
     * true if the old metadata index URLDb is connected;
     * used only for migration
     * @deprecated current and future versions use Solr for metadata
     */
    @Deprecated
    public boolean connectedURLDb() {
        return this.urlIndexFile != null;
    }
    protected void connectUrlDb(final String tablename, final boolean useTailCache, final boolean exceed134217727) {
        if (this.urlIndexFile != null) return;
        this.tablename = tablename;
        this.urlIndexFile = new SplitTable(this.location, tablename, URIMetadataRow.rowdef, useTailCache, exceed134217727);
        // SplitTable always returns != null, even if no file exists.
        // As the old UrlDb should be null if it does not exist, check and close if empty.
        // TODO: check if a SplitTable.open() returning null or an error status on a non-existing file is preferable
        if (this.urlIndexFile.isEmpty()) {
            disconnectUrlDb();
        }
    }

    public void disconnectUrlDb() {
        if (this.urlIndexFile == null) return;
        this.urlIndexFile.close();
        this.urlIndexFile = null;
    }
    public SolrConfiguration getSolrScheme() {
        return this.solrScheme;
    }

    public boolean connectedLocalSolr() {
        return this.solr.isConnected0();
    }

    public void connectLocalSolr(final int commitWithin) throws IOException {
        File baseLocation = this.location;
        if (baseLocation.getName().equals("default")) baseLocation = baseLocation.getParentFile();
        File solrLocation = new File(baseLocation, SOLR_PATH);
        // migrate old solr to new
        for (String oldVersion : SOLR_OLD_PATH) {
            File oldLocation = new File(baseLocation, oldVersion);
            if (oldLocation.exists()) oldLocation.renameTo(solrLocation);
        }
        EmbeddedSolrConnector esc = new EmbeddedSolrConnector(solrLocation, new File(new File(Switchboard.getSwitchboard().appPath, "defaults"), "solr"));
        if (commitWithin >= 0) esc.setCommitWithinMs(commitWithin);
        Version luceneVersion = esc.getConfig().getLuceneVersion("luceneMatchVersion");
        String lvn = luceneVersion.name();
        Log.logInfo("Fulltext", "using lucene version " + lvn);
        int p = lvn.indexOf('_');
        assert SOLR_PATH.endsWith(lvn.substring(p)) : "luceneVersion = " + lvn + ", solrPath = " + SOLR_PATH + ", p = " + p + ", check defaults/solr/solrconfig.xml";
        Log.logInfo("Fulltext", "connected solr in " + solrLocation.toString() + ", lucene version " + lvn);
        this.solr.connect0(esc);
    }

    public void disconnectLocalSolr() {
        this.solr.disconnect0();
    }

    public boolean connectedRemoteSolr() {
        return this.solr.isConnected1();
    }

    public void connectRemoteSolr(final SolrConnector rs) {
        this.solr.connect1(rs);
    }

    public void disconnectRemoteSolr() {
        this.solr.disconnect1();
    }

    public SolrConnector getLocalSolr() {
        return this.solr.getSolr0();
    }

    public SolrConnector getRemoteSolr() {
        return this.solr.getSolr1();
    }

    public SolrConnector getSolr() {
        return this.solr;
    }
    public void clearCache() {
        if (this.urlIndexFile != null && this.urlIndexFile instanceof Cache) ((Cache) this.urlIndexFile).clearCache();
        if (this.statsDump != null) this.statsDump.clear();
        this.statsDump = null;
        this.solr.clearCache();
    }

    public void clearURLIndex() throws IOException {
        if (this.exportthread != null) this.exportthread.interrupt();
        if (this.urlIndexFile == null) {
            SplitTable.delete(this.location, this.tablename);
        } else {
            this.urlIndexFile.clear();
        }
        this.statsDump = null;
        this.getSolr().commit(true);
    }

    public void clearLocalSolr() throws IOException {
        this.solr.clear0();
    }

    public void clearRemoteSolr() throws IOException {
        this.solr.clear1();
    }

    public int size() {
        int size = this.urlIndexFile == null ? 0 : this.urlIndexFile.size();
        size += this.solr.getSize();
        return size;
    }

    public void close() {
        this.statsDump = null;
        if (this.urlIndexFile != null) {
            this.urlIndexFile.close();
            this.urlIndexFile = null;
        }
        this.solr.close();
    }

    public int getCommitWithinMs() {
        return this.solr.getCommitWithinMs();
    }

    public void commit(boolean softCommit) {
        this.solr.commit(softCommit);
    }
    public Date getLoadDate(final String urlHash) {
        if (urlHash == null) return null;
        Date x;
        try {
            x = (Date) this.solr.getFieldById(urlHash, YaCySchema.load_date_dt.getSolrFieldName());
        } catch (IOException e) {
            return null;
        }
        return x;
    }

    public DigestURI getURL(final byte[] urlHash) {
        if (urlHash == null) return null;
        String x;
        try {
            x = (String) this.solr.getFieldById(ASCII.String(urlHash), YaCySchema.sku.getSolrFieldName());
        } catch (IOException e) {
            return null;
        }
        if (x == null) return null;
        try {
            DigestURI uri = new DigestURI(x, urlHash);
            return uri;
        } catch (MalformedURLException e) {
            return null;
        }
    }

    public URIMetadataNode getMetadata(WordReference wre, long weight) {
        if (wre == null) return null; // all time was already wasted in takeRWI to get another element
        return getMetadata(wre.urlhash(), wre, weight);
    }

    public URIMetadataNode getMetadata(final byte[] urlHash) {
        if (urlHash == null) return null;
        return getMetadata(urlHash, null, 0);
    }
    private URIMetadataNode getMetadata(final byte[] urlHash, WordReference wre, long weight) {
        // get the metadata from Solr
        try {
            SolrDocument doc = this.solr.getById(ASCII.String(urlHash));
            if (doc != null) {
                if (this.urlIndexFile != null) this.urlIndexFile.remove(urlHash);
                return new URIMetadataNode(doc, wre, weight);
            }
        } catch (IOException e) {
            Log.logException(e);
        }

        // get the metadata from the old metadata index
        if (this.urlIndexFile != null) try {
            // slow migration to solr
            final Row.Entry entry = this.urlIndexFile.remove(urlHash);
            if (entry == null) return null;
            URIMetadataRow row = new URIMetadataRow(entry, wre);
            SolrInputDocument solrInput = this.solrScheme.metadata2solr(row);
            this.putDocument(solrInput);
            return new URIMetadataNode(solrInput, wre, weight);
        } catch (final IOException e) {
            Log.logException(e);
        }
        return null;
    }
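
    // Migration-on-read sketch (explanatory, not part of the API): any hit that
    // is still found in the legacy URLDb is removed there and rewritten to Solr
    // via putDocument, so the old index drains while lookups keep working.
    //
    //   URIMetadataNode node = fulltext.getMetadata(urlHash); // may migrate the record as a side effect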
    public void putDocument(final SolrInputDocument doc) throws IOException {
        String id = (String) doc.getFieldValue(YaCySchema.id.getSolrFieldName());
        byte[] idb = ASCII.getBytes(id);
        try {
            if (this.urlIndexFile != null) this.urlIndexFile.remove(idb);
            Date sdDate = (Date) this.solr.getFieldById(id, YaCySchema.last_modified.getSolrFieldName());
            Date docDate = null;
            if (sdDate == null || (docDate = SolrConfiguration.getDate(doc, YaCySchema.last_modified)) == null || sdDate.before(docDate)) {
                if (this.solrScheme.contains(YaCySchema.ip_s)) {
                    // ip_s needs a dns lookup which causes blocking during search here
                    this.solr.add(doc);
                } else synchronized (this.solr) {
                    this.solr.add(doc);
                }
            }
        } catch (SolrException e) {
            throw new IOException(e.getMessage(), e);
        }
        this.statsDump = null;
        if (MemoryControl.shortStatus()) clearCache();
    }
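
    // Freshness rule used by putDocument and putMetadata (sketch): a document is
    // only (re)written when Solr has no copy yet or the stored last_modified date
    // is older than the incoming one, so stale crawl results never overwrite
    // newer metadata. Illustrative call with an assumed row:
    //
    //   SolrInputDocument doc = solrScheme.metadata2solr(row);
    //   fulltext.putDocument(doc); // no-op if Solr already holds a newer version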
    public void putMetadata(final URIMetadataRow entry) throws IOException {
        URIMetadataRow row = entry;
        byte[] idb = row.hash();
        String id = ASCII.String(idb);
        try {
            if (this.urlIndexFile != null) this.urlIndexFile.remove(idb);
            SolrDocument sd = this.solr.getById(id);
            if (sd == null || (new URIMetadataNode(sd)).isOlder(row)) {
                if (this.solrScheme.contains(YaCySchema.ip_s)) {
                    // ip_s needs a dns lookup which causes blocking during search here
                    this.solr.add(getSolrScheme().metadata2solr(row));
                } else synchronized (this.solr) {
                    this.solr.add(getSolrScheme().metadata2solr(row));
                }
            }
        } catch (SolrException e) {
            throw new IOException(e.getMessage(), e);
        }
        this.statsDump = null;
        if (MemoryControl.shortStatus()) clearCache();
    }
    /**
     * Using a fragment of the url hash (6 bytes: bytes 6 to 11) it is possible to address all urls from a specific domain.
     * Here such a fragment can be used to delete all documents of that domain at once.
     * @param hosthash the hash of the host to be deleted
     * @param freshdate either null or a date in the past which is the limit for deletion; only documents older than this date are deleted
     */
    public void deleteDomainHashpart(final String hosthash, Date freshdate, boolean concurrent) {
        // first collect all url hashes that belong to the domain
        assert hosthash.length() == 6;
        final String q = YaCySchema.host_id_s.getSolrFieldName() + ":\"" + hosthash + "\"" +
                ((freshdate != null && freshdate.before(new Date())) ? (" AND " + YaCySchema.load_date_dt.getSolrFieldName() + ":[* TO " + ISO8601Formatter.FORMATTER.format(freshdate) + "]") : "");
        Thread t = new Thread() {
            @Override
            public void run() {
                // delete in solr
                synchronized (Fulltext.this.solr) {
                    try {
                        Fulltext.this.solr.deleteByQuery(q);
                    } catch (IOException e) {}
                }
                // delete in the old metadata structure
                if (Fulltext.this.urlIndexFile != null) {
                    final ArrayList<String> l = new ArrayList<String>();
                    synchronized (this) {
                        CloneableIterator<byte[]> i;
                        try {
                            i = Fulltext.this.urlIndexFile.keys(true, null);
                            String hash;
                            while (i != null && i.hasNext()) {
                                hash = ASCII.String(i.next());
                                if (hosthash.equals(hash.substring(6))) l.add(hash);
                            }
                            // then delete the urls using this list
                            for (final String h : l) Fulltext.this.urlIndexFile.delete(ASCII.getBytes(h));
                        } catch (IOException e) {}
                    }
                }
                // finally remove the line with statistics
                if (Fulltext.this.statsDump != null) {
                    final Iterator<HostStat> hsi = Fulltext.this.statsDump.iterator();
                    HostStat hs;
                    while (hsi.hasNext()) {
                        hs = hsi.next();
                        if (hs.hosthash.equals(hosthash)) {
                            hsi.remove();
                            break;
                        }
                    }
                }
            }
        };
        if (concurrent) t.start(); else {
            t.run();
            Fulltext.this.getSolr().commit(true);
        }
    }
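
    // Illustrative call (values assumed): the host hash is the 6-character
    // fragment at positions 6..11 of any 12-character url hash of that host.
    //
    //   String hosthash = urlhash.substring(6);
    //   fulltext.deleteDomainHashpart(hosthash, null, true); // no date limit, run concurrently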
    public void deleteDomainHostname(final String hostname, Date freshdate, boolean concurrent) {
        // first collect all url hashes that belong to the domain
        final String q = YaCySchema.host_s.getSolrFieldName() + ":\"" + hostname + "\"" +
                ((freshdate != null && freshdate.before(new Date())) ? (" AND " + YaCySchema.load_date_dt.getSolrFieldName() + ":[* TO " + ISO8601Formatter.FORMATTER.format(freshdate) + "]") : "");
        Thread t = new Thread() {
            @Override
            public void run() {
                // delete in solr
                synchronized (Fulltext.this.solr) {
                    try {
                        Fulltext.this.solr.deleteByQuery(q);
                    } catch (IOException e) {}
                }
                // finally remove the line with statistics
                if (Fulltext.this.statsDump != null) {
                    final Iterator<HostStat> hsi = Fulltext.this.statsDump.iterator();
                    HostStat hs;
                    while (hsi.hasNext()) {
                        hs = hsi.next();
                        if (hs.hostname.equals(hostname)) {
                            hsi.remove();
                            break;
                        }
                    }
                }
            }
        };
        if (concurrent) t.start(); else {
            t.run();
            Fulltext.this.getSolr().commit(true);
        }
    }
    /**
     * remove a full subpath from the index
     * @param basepath the left path of the url; at least until the end of the host
     * @param freshdate either null or a date in the past which is the limit for deletion; only documents older than this date are deleted
     * @param concurrently if true, then the method returns immediately and runs concurrently
     * @return the number of deleted documents; only meaningful when run synchronously
     */
    public int remove(final String basepath, Date freshdate, final boolean concurrently) {
        DigestURI uri;
        try {
            uri = new DigestURI(basepath);
        } catch (MalformedURLException e) {
            return 0;
        }
        final String host = uri.getHost();
        final String q = YaCySchema.host_s.getSolrFieldName() + ":\"" + host + "\"" +
                ((freshdate != null && freshdate.before(new Date())) ? (" AND " + YaCySchema.load_date_dt.getSolrFieldName() + ":[* TO " + ISO8601Formatter.FORMATTER.format(freshdate) + "]") : "");
        final AtomicInteger count = new AtomicInteger(0);
        Thread t = new Thread() {
            @Override
            public void run() {
                final BlockingQueue<SolrDocument> docs = getSolr().concurrentQuery(q, 0, 1000000, 600000, -1, YaCySchema.id.getSolrFieldName(), YaCySchema.sku.getSolrFieldName());
                try {
                    SolrDocument doc;
                    while ((doc = docs.take()) != AbstractSolrConnector.POISON_DOCUMENT) {
                        String u = (String) doc.getFieldValue(YaCySchema.sku.getSolrFieldName());
                        if (u.startsWith(basepath)) {
                            remove(ASCII.getBytes((String) doc.getFieldValue(YaCySchema.id.getSolrFieldName())));
                            count.incrementAndGet();
                        }
                    }
                    if (count.get() > 0) Fulltext.this.solr.commit(true);
                } catch (InterruptedException e) {}
            }
        };
        if (concurrently) t.start(); else t.run();
        return count.get();
    }
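
    // Illustrative call (URL assumed): delete everything below one directory and
    // wait for completion so that the returned count is meaningful.
    //
    //   int deleted = fulltext.remove("http://example.org/old/", null, false);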
    /**
     * remove a list of ids from the index
     * @param deleteIDs a list of url hashes; each denoting a document
     * @param concurrently if true, then the method returns immediately and runs concurrently
     */
    public void remove(final List<byte[]> deleteIDs, final boolean concurrently) {
        if (deleteIDs == null || deleteIDs.size() == 0) return;
        Thread t = new Thread() {
            @Override
            public void run() {
                try {
                    synchronized (Fulltext.this.solr) {
                        for (byte[] urlHash : deleteIDs) {
                            Fulltext.this.solr.delete(ASCII.String(urlHash));
                        }
                        Fulltext.this.solr.commit(true);
                    }
                } catch (final Throwable e) {
                    Log.logException(e);
                }
                if (Fulltext.this.urlIndexFile != null) try {
                    for (byte[] urlHash : deleteIDs) {
                        final Row.Entry r = Fulltext.this.urlIndexFile.remove(urlHash);
                        if (r != null) Fulltext.this.statsDump = null;
                    }
                } catch (final IOException e) {}
            }
        };
        if (concurrently) t.start(); else t.run();
    }
    public boolean remove(final byte[] urlHash) {
        if (urlHash == null) return false;
        try {
            synchronized (this.solr) {
                this.solr.delete(ASCII.String(urlHash));
            }
        } catch (final Throwable e) {
            Log.logException(e);
        }
        if (this.urlIndexFile != null) try {
            final Row.Entry r = this.urlIndexFile.remove(urlHash);
            if (r != null) this.statsDump = null;
            return r != null;
        } catch (final IOException e) {
            return false;
        }
        return false;
    }

    public boolean exists(final byte[] urlHash) {
        if (urlHash == null) return false;
        if (this.urlIndexFile != null && this.urlIndexFile.has(urlHash)) return true;
        try {
            if (this.solr.exists(YaCySchema.id.getSolrFieldName(), ASCII.String(urlHash))) return true;
        } catch (final Throwable e) {
            Log.logException(e);
        }
        return false;
    }

    public String failReason(final String urlHash) throws IOException {
        if (urlHash == null) return null;
        String reason = (String) this.solr.getFieldById(urlHash, YaCySchema.failreason_t.getSolrFieldName());
        if (reason == null || reason.isEmpty()) return null;
        return reason;
    }
    public List<File> dumpFiles() {
        EmbeddedSolrConnector esc = (EmbeddedSolrConnector) this.solr.getSolr0();
        ArrayList<File> zips = new ArrayList<File>();
        if (esc == null) {
            Log.logWarning("Fulltext", "HOT DUMP selected solr0 == NULL, no dump list!");
            return zips;
        }
        if (esc.getStoragePath() == null) {
            Log.logWarning("Fulltext", "HOT DUMP selected solr0.getStoragePath() == NULL, no dump list!");
            return zips;
        }
        File storagePath = esc.getStoragePath().getParentFile();
        if (storagePath == null) {
            Log.logWarning("Fulltext", "HOT DUMP selected esc.getStoragePath().getParentFile() == NULL, no dump list!");
            return zips;
        }
        Log.logInfo("Fulltext", "HOT DUMP dump path = " + storagePath.toString());
        for (String p : storagePath.list()) {
            if (p.endsWith("zip")) zips.add(new File(storagePath, p));
        }
        return zips;
    }
    /**
     * create a dump file from the current solr directory
     * @return the zip file that contains the dump
     */
    public File dumpSolr() {
        EmbeddedSolrConnector esc = (EmbeddedSolrConnector) this.solr.getSolr0();
        int commitWithin = esc.getCommitWithinMs();
        File storagePath = esc.getStoragePath();
        File zipOut = new File(storagePath.toString() + "_" + GenericFormatter.SHORT_DAY_FORMATTER.format() + ".zip");
        synchronized (this.solr) {
            this.disconnectLocalSolr();
            try {
                ZIPWriter.zip(storagePath, zipOut);
            } catch (IOException e) {
                Log.logException(e);
            } finally {
                try {
                    this.connectLocalSolr(commitWithin);
                } catch (IOException e) {
                    Log.logException(e);
                }
            }
        }
        return zipOut;
    }
    /**
     * restore a solr dump to the current solr directory
     * @param solrDumpZipFile the zip file created by dumpSolr()
     */
    public void restoreSolr(File solrDumpZipFile) {
        EmbeddedSolrConnector esc = (EmbeddedSolrConnector) this.solr.getSolr0();
        int commitWithin = esc.getCommitWithinMs();
        File storagePath = esc.getStoragePath();
        synchronized (this.solr) {
            this.disconnectLocalSolr();
            try {
                ZIPReader.unzip(solrDumpZipFile, storagePath);
            } catch (IOException e) {
                Log.logException(e);
            } finally {
                try {
                    this.connectLocalSolr(commitWithin);
                } catch (IOException e) {
                    Log.logException(e);
                }
            }
        }
    }
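
    // Dump/restore round trip (sketch, file name assumed): the local core is
    // taken offline while its index directory is zipped or unzipped, then
    // reconnected with the previous commit-within setting.
    //
    //   File dump = fulltext.dumpSolr(); // e.g. ".../solr_40_<date>.zip"
    //   fulltext.restoreSolr(dump);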
    // export methods
    public Export export(final File f, final String filter, final int format, final boolean dom) {
        if ((this.exportthread != null) && (this.exportthread.isAlive())) {
            Log.logWarning("LURL-EXPORT", "cannot start another export thread, already one running");
            return this.exportthread;
        }
        this.exportthread = new Export(f, filter, format, dom);
        this.exportthread.start();
        return this.exportthread;
    }

    public Export export() {
        return this.exportthread;
    }
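
    // Illustrative export (file and filter assumed): format 0 writes plain text,
    // 1 writes HTML, 2 writes RSS/XML; dom=true lists host names only.
    //
    //   Export job = fulltext.export(new File("export.txt"), null, 0, false);
    //   job.join(); // Export extends Thread; may throw InterruptedException
    //   Log.logInfo("Fulltext", "exported " + job.count() + " urls, failure: " + job.failed());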
    public class Export extends Thread {
        private final File f;
        private final Pattern pattern;
        private int count;
        private String failure;
        private final int format;
        private final boolean dom;

        private Export(final File f, final String filter, final int format, final boolean dom) {
            // format: 0=text, 1=html, 2=rss/xml
            this.f = f;
            this.pattern = filter == null ? null : Pattern.compile(filter);
            this.count = 0;
            this.failure = null;
            this.format = format;
            this.dom = dom && format != 2; // the rss/xml format always lists full urls
        }
        @Override
        public void run() {
            try {
                final File parentf = this.f.getParentFile();
                if (parentf != null) parentf.mkdirs();
                final PrintWriter pw = new PrintWriter(new BufferedOutputStream(new FileOutputStream(this.f)));
                if (this.format == 1) {
                    pw.println("<html><head></head><body>");
                }
                if (this.format == 2) {
                    pw.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
                    pw.println("<?xml-stylesheet type='text/xsl' href='/yacysearch.xsl' version='1.0'?>");
                    pw.println("<rss version=\"2.0\" xmlns:yacy=\"http://www.yacy.net/\" xmlns:opensearch=\"http://a9.com/-/spec/opensearch/1.1/\" xmlns:atom=\"http://www.w3.org/2005/Atom\">");
                    pw.println("<channel>");
                    pw.println("<title>YaCy Peer-to-Peer - Web-Search URL Export</title>");
                    pw.println("<description></description>");
                    pw.println("<link>http://yacy.net</link>");
                }
                if (this.dom) {
                    Map<String, ReversibleScoreMap<String>> scores = Fulltext.this.getSolr().getFacets(YaCySchema.httpstatus_i.getSolrFieldName() + ":200", 100000, YaCySchema.host_s.getSolrFieldName());
                    ReversibleScoreMap<String> stats = scores.get(YaCySchema.host_s.getSolrFieldName());
                    for (final String host : stats) {
                        if (this.pattern != null && !this.pattern.matcher(host).matches()) continue;
                        if (this.format == 0) pw.println(host);
                        if (this.format == 1) pw.println("<a href=\"http://" + host + "\">" + host + "</a><br>");
                        this.count++;
                    }
                } else {
                    BlockingQueue<SolrDocument> docs = Fulltext.this.getSolr().concurrentQuery(YaCySchema.httpstatus_i.getSolrFieldName() + ":200", 0, 100000000, 10 * 60 * 60 * 1000, 100,
                            YaCySchema.id.getSolrFieldName(), YaCySchema.sku.getSolrFieldName(), YaCySchema.title.getSolrFieldName(),
                            YaCySchema.author.getSolrFieldName(), YaCySchema.description.getSolrFieldName(), YaCySchema.size_i.getSolrFieldName(), YaCySchema.last_modified.getSolrFieldName());
                    SolrDocument doc;
                    ArrayList<?> title;
                    String url, author, description, hash;
                    Integer size;
                    Date date;
                    while ((doc = docs.take()) != AbstractSolrConnector.POISON_DOCUMENT) {
                        hash = (String) doc.getFieldValue(YaCySchema.id.getSolrFieldName());
                        url = (String) doc.getFieldValue(YaCySchema.sku.getSolrFieldName());
                        title = (ArrayList<?>) doc.getFieldValue(YaCySchema.title.getSolrFieldName());
                        author = (String) doc.getFieldValue(YaCySchema.author.getSolrFieldName());
                        description = (String) doc.getFieldValue(YaCySchema.description.getSolrFieldName());
                        size = (Integer) doc.getFieldValue(YaCySchema.size_i.getSolrFieldName());
                        date = (Date) doc.getFieldValue(YaCySchema.last_modified.getSolrFieldName());
                        if (this.pattern != null && !this.pattern.matcher(url).matches()) continue;
                        if (this.format == 0) {
                            pw.println(url);
                        }
                        if (this.format == 1) {
                            if (title != null) pw.println("<a href=\"" + MultiProtocolURI.escape(url) + "\">" + CharacterCoding.unicode2xml((String) title.iterator().next(), true) + "</a>");
                        }
                        if (this.format == 2) {
                            pw.println("<item>");
                            if (title != null) pw.println("<title>" + CharacterCoding.unicode2xml((String) title.iterator().next(), true) + "</title>");
                            pw.println("<link>" + MultiProtocolURI.escape(url) + "</link>");
                            if (author != null && !author.isEmpty()) pw.println("<author>" + CharacterCoding.unicode2xml(author, true) + "</author>");
                            if (description != null && !description.isEmpty()) pw.println("<description>" + CharacterCoding.unicode2xml(description, true) + "</description>");
                            if (date != null) pw.println("<pubDate>" + DateUtil.formatDate(date) + "</pubDate>");
                            if (size != null) pw.println("<yacy:size>" + size.intValue() + "</yacy:size>");
                            pw.println("<guid isPermaLink=\"false\">" + hash + "</guid>");
                            pw.println("</item>");
                        }
                        this.count++;
                    }
                }
                if (this.format == 1) {
                    pw.println("</body></html>");
                }
                if (this.format == 2) {
                    pw.println("</channel>");
                    pw.println("</rss>");
                }
                pw.close();
            } catch (final IOException e) {
                Log.logException(e);
                this.failure = e.getMessage();
            } catch (final Exception e) {
                Log.logException(e);
                this.failure = e.getMessage();
            }
            // terminate process
        }

        public File file() {
            return this.f;
        }

        public String failed() {
            return this.failure;
        }

        public int count() {
            return this.count;
        }
    }
    public Iterator<HostStat> statistics(int count, final ScoreMap<String> domainScore) {
        // prevent too heavy IO
        if (this.statsDump != null && count <= this.statsDump.size()) return this.statsDump.iterator();

        // fetch urls from the database to determine the host in clear text
        final Iterator<String> j = domainScore.keys(false); // iterate urlhash-examples in reverse order (biggest first)
        String urlhash;
        count += 10; // fetch a few more to avoid having to do this again too soon after deletions
        if (count < 0 || domainScore.sizeSmaller(count)) count = domainScore.size();
        this.statsDump = new ArrayList<HostStat>();
        DigestURI url;
        while (j.hasNext()) {
            urlhash = j.next();
            if (urlhash == null) continue;
            url = this.getURL(ASCII.getBytes(urlhash));
            if (url == null || url.getHost() == null) continue;
            if (this.statsDump == null) return new ArrayList<HostStat>().iterator(); // some other operation has destroyed the object
            this.statsDump.add(new HostStat(url.getHost(), url.getPort(), urlhash.substring(6), domainScore.get(urlhash)));
            count--;
            if (count == 0) break;
        }
        // finally return an iterator for the result array
        return (this.statsDump == null) ? new ArrayList<HostStat>().iterator() : this.statsDump.iterator();
    }
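
    // Illustrative use (score map assumed): resolve the top entries of a
    // url-hash score map into clear-text host statistics.
    //
    //   Iterator<HostStat> it = fulltext.statistics(20, domainScore);
    //   while (it.hasNext()) {
    //       HostStat hs = it.next();
    //       Log.logInfo("Fulltext", hs.hostname + ":" + hs.port + " -> " + hs.count);
    //   }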
    public static class HostStat {
        public String hostname, hosthash;
        public int port;
        public int count;

        private HostStat(final String host, final int port, final String urlhashfragment, final int count) {
            assert urlhashfragment.length() == 6;
            this.hostname = host;
            this.port = port;
            this.hosthash = urlhashfragment;
            this.count = count;
        }
    }
}