//RegExp for LogLevel I
private static Pattern i1 = Pattern.compile("Received (\\d*) URLs from peer [\\w-_]{12}:[\\w-_]*/[\\w.-]* in (\\d*) ms, Blocked (\\d*) URLs");
private static Pattern i2 = Pattern.compile("Received (\\d*) Entries (\\d*) Words \\[[\\w-_]{12} .. [\\w-_]{12}\\]/[\\w.-]* from [\\w-_]{12}:[\\w-_]*/[\\w.-]*, processed in (\\d*) milliseconds, requesting (\\d*)/(\\d*) URLs, blocked (\\d*) RWIs");
private static Pattern i2_2 = Pattern.compile("Received (\\d*) Entries (\\d*) Words \\[[\\w-_]{12} .. [\\w-_]{12}\\]/[\\w.-]* from [\\w-_]{12}:[\\w-_]*, processed in (\\d*) milliseconds, requesting (\\d*)/(\\d*) URLs, blocked (\\d*) RWIs");
private static Pattern i3 = Pattern.compile("Index transfer of (\\d*) words \\[[\\w-_]{12} .. [\\w-_]{12}\\] to peer ([\\w-_]*):([\\w-_]{12}) in (\\d*) seconds successful \\((\\d*) words/s, (\\d*) Bytes\\)");
private static Pattern i4 = Pattern.compile("Index transfer of (\\d*) entries (\\d*) words \\[[\\w-_]{12} .. [\\w-_]{12}\\] and (\\d*) URLs to peer ([\\w-_]*):([\\w-_]{12}) in (\\d*) seconds successful \\((\\d*) words/s, (\\d*) Bytes\\)");
private static Pattern i13 = Pattern.compile("WROTE HEADER for |LOCALCRAWL\\[\\d*, \\d*, \\d*, \\d*\\]|REJECTED WRONG STATUS TYPE");
//RegExp for LogLevel W
private static Pattern w1 = Pattern.compile("found not enough \\(\\d*\\) peers for distribution");
private static Pattern w2 = Pattern.compile("Transfer to peer ([\\w-_]*):([\\w-_]{12}) failed:'(\\w*)'");
//RegExp for LogLevel E
private static Pattern e1 = Pattern.compile("INTERNAL ERROR AT plasmaCrawlLURL:store:de.anomic.kelondro.kelondroException: tried to create (\\w*) node twice in db");
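// Illustrative sketch (not part of the original parser): how one of the patterns above
// can be applied to a single log line with java.util.regex.Matcher. The sample line and
// the method name exampleMatchI1 are hypothetical; only the Pattern field i1 is real.
private static void exampleMatchI1() {
    final String line = "Received 123 URLs from peer AbCdEfGhIjKl:examplePeer/0.5 in 456 ms, Blocked 7 URLs";
    final java.util.regex.Matcher m = i1.matcher(line);
    if (m.matches()) {
        final int receivedURLs = Integer.parseInt(m.group(1)); // group 1: number of received URLs
        final int durationMs   = Integer.parseInt(m.group(2)); // group 2: transfer time in ms
        final int blockedURLs  = Integer.parseInt(m.group(3)); // group 3: number of blocked URLs
        System.out.println(receivedURLs + " URLs in " + durationMs + " ms, " + blockedURLs + " blocked");
    }
}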
System.out.println("INDEXER: Indexed "+indexedSites+" sites in "+(indexedStackingTime+indexedParsingTime+indexedIndexingTime+indexedStorageTime)+" milliseconds.");
System.out.println("INDEXER: Indexed "+indexedWordSum+" words on "+indexedSites+" sites. (avg. words per site: "+(indexedWordSum/indexedSites)+").");
System.out.println("INDEXER: Total Size of indexed sites: "+indexedSiteSizeSum+" bytes (avg. size per site: "+(indexedSiteSizeSum/indexedSites)+" bytes).");
System.out.println("INDEXER: Total Number of Anchors found: "+indexedAnchorsCount+"(avg. Anchors per site: "+(indexedAnchorsCount/indexedSites)+").");
System.out.println("INDEXER: Total StackingTime: "+indexedStackingTime+" milliseconds (avg. StackingTime: "+(indexedStackingTime/indexedSites)+" milliseconds).");
System.out.println("INDEXER: Total ParsingTime: "+indexedParsingTime+" milliseconds (avg. ParsingTime: "+(indexedParsingTime/indexedSites)+" milliseconds).");
System.out.println("INDEXER: Total IndexingTime: "+indexedIndexingTime+" milliseconds (avg. IndexingTime: "+(indexedIndexingTime/indexedSites)+" milliseconds).");
System.out.println("INDEXER: Total StorageTime: "+indexedStorageTime+" milliseconds (avg. StorageTime: "+(indexedStorageTime/indexedSites)+" milliseconds).");
if (urlSum == 0) urlSum++; // avoid division by zero in the per-URL average below
System.out.println("DHT: Received "+urlSum+" URLs in "+urlTimeSum+" ms. Blocked "+blockedURLSum+" URLs.");
System.out.println("DHT: "+urlTimeSum/urlSum+" milliseconds per URL.");
if (rwiSum == 0) rwiSum++; // avoid division by zero in the per-RWI average below
System.out.println("DHT: Received "+rwiSum+" RWIs from "+wordsSum+" Words in "+rwiTimeSum+" ms. "+urlReqSum+" requested URLs.");
System.out.println("DHT: Blocked "+blockedRWISum+" RWIs before requesting URLs because the URL hash was blacklisted.");
System.out.println("DHT: "+rwiTimeSum/rwiSum+" milliseconds per RWI.");
System.out.println("DHT: Rejected "+RWIRejectCount+" Indextransfers from "+RWIRejectPeerNames.size()+" PeerNames with "+RWIRejectPeerHashs.size()+" PeerHashs.");
System.out.println("DHT: "+((double)Math.round(DHTSendTraffic*100/(1024*1024)))/100+" MegaBytes ("+DHTSendTraffic+" Bytes) of DHT-Transfertraffic.");
System.out.println("DHT: Sended "+DHTSendURLs+" URLs via DHT.");
System.out.println("DHT: DHT Transfers send to "+DHTPeerNames.size()+" Peernames with "+DHTPeerHashs.size()+" Peerhashs.");
System.out.println("DHT: Totally selected "+DHTSelectionWordsCount+" words in "+DHTSelectionWordsTimeCount+" seconds ("+(float)DHTSelectionWordsCount/DHTSelectionWordsTimeCount+" words/s)");