memory hacks

pull/1/head
Michael Peter Christen 13 years ago
parent b4409cc803
commit 4540174fe0

@@ -143,7 +143,7 @@ public class BookmarkHelper {
         //load the links
         final ContentScraper scraper = new ContentScraper(baseURL);
         //OutputStream os = new htmlFilterOutputStream(null, scraper, null, false);
-        final Writer writer= new TransformerWriter(null,null,scraper, null, false);
+        final Writer writer = new TransformerWriter(null, null, scraper, null, false);
         FileUtils.copy(input,writer);
         writer.close();
         links = scraper.getAnchors();

@@ -1039,18 +1039,18 @@ public final class HTTPDFileHandler {
             if (mimeType.startsWith("text")) {
                 // every text-file distributed by yacy is UTF-8
-                if(!path.startsWith("/repository")) {
+                if (!path.startsWith("/repository")) {
                     mimeType = mimeType + "; charset=UTF-8";
                 } else {
                     // detect charset of html-files
-                    if((path.endsWith("html") || path.endsWith("htm"))) {
+                    if ((path.endsWith("html") || path.endsWith("htm"))) {
                         // save position
                         fis.mark(1000);
                         // scrape document to look up charset
-                        final ScraperInputStream htmlFilter = new ScraperInputStream(fis,"UTF-8",new DigestURI("http://localhost"),null,false);
+                        final ScraperInputStream htmlFilter = new ScraperInputStream(fis, "UTF-8", new DigestURI("http://localhost"), null, false);
                         final String charset = htmlParser.patchCharsetEncoding(htmlFilter.detectCharset());
-                        if(charset != null)
-                            mimeType = mimeType + "; charset="+charset;
+                        htmlFilter.close();
+                        if (charset != null) mimeType = mimeType + "; charset="+charset;
                         // reset position
                         fis.reset();
                     }

@@ -485,17 +485,24 @@ public class ContentScraper extends AbstractScraper implements Scraper {
         final TransformerWriter writer = new TransformerWriter(null, null, scraper, null, false);
         try {
             FileUtils.copy(new CharArrayReader(inlineHtml), writer);
-            writer.close();
         } catch (final IOException e) {
             Log.logException(e);
             return cleanLine(super.stripAll(inlineHtml));
+        } finally {
+            scraper.close();
+            try {
+                writer.close();
+            } catch (IOException e) {
+            }
         }
         for (final Map.Entry<MultiProtocolURI, Properties> entry: scraper.getAnchors().entrySet()) {
             mergeAnchors(entry.getKey(), entry.getValue());
         }
         this.images.putAll(scraper.images);
-        return cleanLine(super.stripAll(scraper.content.getChars()));
+        String line = cleanLine(super.stripAll(scraper.content.getChars()));
+        scraper.close();
+        return line;
     }

     private final static String cleanLine(final String s) {
@@ -885,14 +892,14 @@ public class ContentScraper extends AbstractScraper implements Scraper {
         // scrape document to look up charset
         final ScraperInputStream htmlFilter = new ScraperInputStream(new ByteArrayInputStream(page),"UTF-8", new MultiProtocolURI("http://localhost"),null,false);
         String charset = htmlParser.patchCharsetEncoding(htmlFilter.detectCharset());
-        if(charset == null)
-            charset = Charset.defaultCharset().toString();
+        htmlFilter.close();
+        if (charset == null) charset = Charset.defaultCharset().toString();
         // scrape content
         final ContentScraper scraper = new ContentScraper(new MultiProtocolURI("http://localhost"));
         final Writer writer = new TransformerWriter(null, null, scraper, null, false);
         FileUtils.copy(new ByteArrayInputStream(page), writer, Charset.forName(charset));
+        writer.close();
         return scraper;
     }

@@ -34,7 +34,6 @@ import java.util.TreeSet;
 import net.yacy.cora.document.ASCII;
 import net.yacy.kelondro.io.CharBuffer;
-import net.yacy.kelondro.logging.Log;

 public class ContentTransformer extends AbstractTransformer implements Transformer {
@@ -90,11 +89,7 @@ public class ContentTransformer extends AbstractTransformer implements Transformer {
         }
         bb.append("</FONT> ");
         final char[] result = bb.getChars();
-        try {
-            bb.close();
-        } catch (IOException e) {
-            Log.logException(e);
-        }
+        bb.close();
         return result;
     }

@@ -9,7 +9,7 @@
 // $LastChangedBy$
 //
 // LICENSE
 //
 // This program is free software; you can redistribute it and/or modify
 // it under the terms of the GNU General Public License as published by
 // the Free Software Foundation; either version 2 of the License, or
@@ -39,11 +39,11 @@ import net.yacy.cora.document.MultiProtocolURI;
 public class ScraperInputStream extends InputStream implements ScraperListener {

     private static final int MODE_PRESCAN = 0;
     private static final int MODE_PRESCAN_FINISHED = 1;
     private int mode = 1;

     private static final long preBufferSize = 4096;
     private long preRead = 0;

     private final BufferedInputStream bufferedIn;
@@ -51,10 +51,10 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
     private String detectedCharset;
     private boolean charsetChanged = false;
     private boolean endOfHead = false;

     private Reader reader;
     private Writer writer;

     public ScraperInputStream(
             final InputStream inStream,
             final String inputStreamCharset,
@@ -65,10 +65,10 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
         // create a input stream for buffereing
         this.bufferedIn = new BufferedInputStream(inStream, (int) preBufferSize);
         this.bufferedIn.mark((int) preBufferSize);

         final ContentScraper scraper = new ContentScraper(rooturl);
         scraper.registerHtmlFilterEventListener(this);

         try {
             this.reader = (inputStreamCharset == null) ? new InputStreamReader(this) : new InputStreamReader(this,inputStreamCharset);
         } catch (UnsupportedEncodingException e) {
@@ -78,17 +78,17 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
                 // how is that possible?
                 this.reader = new InputStreamReader(this);
             }
         }
         this.writer = new TransformerWriter(null,null,scraper,transformer,passbyIfBinarySuspect);
     }

     private static String extractCharsetFromMimetypeHeader(final String mimeType) {
         if (mimeType == null) return null;
         final String[] parts = mimeType.split(";");
         if (parts == null || parts.length <= 1) return null;
         for (int i=1; i < parts.length; i++) {
             final String param = parts[i].trim();
             if (param.startsWith("charset=")) {
                 String charset = param.substring("charset=".length()).trim();
@@ -97,13 +97,14 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
                 return charset.trim();
             }
         }
         return null;
     }

+    @Override
     public void scrapeTag0(final String tagname, final Properties tagopts) {
         if (tagname == null || tagname.length() == 0) return;
         if (tagname.equalsIgnoreCase("meta")) {
             if (tagopts.containsKey("http-equiv")) {
                 final String value = tagopts.getProperty("http-equiv");
@@ -113,7 +114,7 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
                     this.detectedCharset = extractCharsetFromMimetypeHeader(contentType);
                     if (this.detectedCharset != null && this.detectedCharset.length() > 0) {
                         this.charsetChanged = true;
                     } else if (tagopts.containsKey("charset")) {
                         // sometimes the charset property is configured as extra attribut. try it ...
                         this.detectedCharset = tagopts.getProperty("charset");
                         this.charsetChanged = true;
@@ -123,48 +124,54 @@ public class ScraperInputStream extends InputStream implements ScraperListener {
             }
         }
     }

+    @Override
     public void scrapeTag1(final String tagname, final Properties tagopts, final char[] text) {
         if (tagname == null || tagname.length() == 0) return;
         if (tagname.equalsIgnoreCase("head")) {
             this.endOfHead = true;
         }
     }

     public String detectCharset() throws IOException {
         this.mode = MODE_PRESCAN;

         // loop until we have detected the header element or the charset data
         int c;
         while ((c = this.reader.read())!= -1) {
             this.writer.write(c);
             if (this.charsetChanged) break; // thats enough
         }

         // free writer
         this.writer = null;
         // don't close writer here, otherwise it will shutdown our source stream

         // reset the buffer if not already done
         if (this.mode != MODE_PRESCAN_FINISHED) {
             this.mode++;
             this.bufferedIn.reset();
         }

         // return scanning result
         return (this.charsetChanged) ? this.detectedCharset : null;
     }

+    @Override
     public int read() throws IOException {
         // mode 0 is called from within the detectCharset function
         if (this.mode == MODE_PRESCAN) {
             if (this.endOfHead || this.charsetChanged || this.preRead >= preBufferSize - 1) {
                 return -1;
             }
             this.preRead++;
         }
         return this.bufferedIn.read();
     }

+    @Override
+    public void close() throws IOException {
+        if (this.writer != null) this.writer.close();
+    }
 }

@@ -127,11 +127,7 @@ public final class TransformerWriter extends Writer {
         }
         bb.append('>');
         final char[] result = bb.getChars();
-        try {
-            bb.close();
-        } catch (final IOException e) {
-            Log.logException(e);
-        }
+        bb.close();
         return result;
     }
@@ -147,11 +143,7 @@ public final class TransformerWriter extends Writer {
         bb.append(text);
         bb.append('<').append('/').append(tagname).append('>');
         final char[] result = bb.getChars();
-        try {
-            bb.close();
-        } catch (final IOException e) {
-            Log.logException(e);
-        }
+        bb.close();
         return result;
     }
@@ -165,11 +157,7 @@ public final class TransformerWriter extends Writer {
         }
         bb.append('>');
         final char[] result = bb.getChars();
-        try {
-            bb.close();
-        } catch (final IOException e) {
-            Log.logException(e);
-        }
+        bb.close();
         return result;
     }
@@ -178,11 +166,7 @@ public final class TransformerWriter extends Writer {
         final CharBuffer cb = new CharBuffer(ContentScraper.MAX_DOCSIZE, gt0, gt0.length + text.length + tagname.length() + 3);
         cb.append(text).append('<').append('/').append(tagname).append('>');
         final char[] result = cb.getChars();
-        try {
-            cb.close();
-        } catch (final IOException e) {
-            Log.logException(e);
-        }
+        cb.close();
         return result;
     }
@@ -202,11 +186,7 @@ public final class TransformerWriter extends Writer {
             result = bb.getChars(1);
         else
             result = bb.getChars();
-        try {
-            bb.close();
-        } catch (final IOException ex) {
-            Log.logException(ex);
-        }
+        bb.close();
         return result;
     }
@@ -227,12 +207,7 @@ public final class TransformerWriter extends Writer {
                 // this single tag is collected at once here
                 final CharBuffer charBuffer = new CharBuffer(ContentScraper.MAX_DOCSIZE, content);
                 this.scraper.scrapeTag0(tag, charBuffer.propParser());
-                try {
-                    charBuffer.close();
-                } catch (final IOException e) {
-                    // TODO Auto-generated catch block
-                    Log.logException(e);
-                }
+                charBuffer.close();
             }
             if ((this.transformer != null) && (this.transformer.isTag0(tag))) {
                 // this single tag is collected at once here
@@ -240,11 +215,7 @@ public final class TransformerWriter extends Writer {
                 try {
                     return this.transformer.transformTag0(tag, scb.propParser(), quotechar);
                 } finally {
-                    try {
-                        scb.close();
-                    } catch (final IOException e) {
-                        Log.logException(e);
-                    }
+                    scb.close();
                 }
             } else if (((this.scraper != null) && (this.scraper.isTag1(tag))) ||
                        ((this.transformer != null) && (this.transformer.isTag1(tag)))) {
@@ -252,11 +223,7 @@ public final class TransformerWriter extends Writer {
                 this.filterTag = tag;
                 final CharBuffer scb = new CharBuffer(ContentScraper.MAX_DOCSIZE, content);
                 this.filterOpts = scb.propParser();
-                try {
-                    scb.close();
-                } catch (final IOException e) {
-                    Log.logException(e);
-                }
+                scb.close();
                 if (this.filterCont == null) this.filterCont = new CharBuffer(ContentScraper.MAX_DOCSIZE, Math.max(100, content.length)); else this.filterCont.reset();
                 return new char[0];
             } else {

@@ -144,14 +144,13 @@ public class pdfParser extends AbstractParser implements Parser {
                     try {
                         writer.append(stripper.getText(pdfDoc));
                     } catch (final Throwable e) {}
                 }
             };
             t.start();
             t.join(3000);
             if (t.isAlive()) t.interrupt();
             pdfDoc.close();
             contentBytes = writer.getBytes(); // get final text before closing writer
-            writer.close();
         } catch (final IOException e) {
             // close the writer
             if (writer != null) try { writer.close(); } catch (final Exception ex) {}
@@ -166,6 +165,7 @@ public class pdfParser extends AbstractParser implements Parser {
             //throw new Parser.Failure(e.getMessage(), location);
         } finally {
             try {pdfDoc.close();} catch (final IOException e) {}
+            writer.close();
         }

         String[] docKeywords = null;
@@ -175,7 +175,7 @@ public class pdfParser extends AbstractParser implements Parser {
         if (docTitle == null) {
             docTitle = docSubject;
         }

         // clear resources in pdfbox. they say that is resolved but it's not. see:
         // https://issues.apache.org/jira/browse/PDFBOX-313
         // https://issues.apache.org/jira/browse/PDFBOX-351

@@ -189,7 +189,7 @@ public class URIMetadataRow implements URIMetadata {
             final String dc_publisher,
             final float lat,
             final float lon) {
-        final CharBuffer s = new CharBuffer(20000, 360);
+        final CharBuffer s = new CharBuffer(3600, 360);
         s.append(url.toNormalform(false, true)).appendLF();
         s.append(dc_title).appendLF();
         if (dc_creator.length() > 80) s.append(dc_creator, 0, 80); else s.append(dc_creator);

@@ -130,7 +130,7 @@ public final class CharBuffer extends Writer {
     }

     private void grow(int minSize) {
-        int newsize = 2 * Math.max(this.buffer.length, minSize);
+        int newsize = 12 * Math.max(this.buffer.length, minSize) / 10; // grow by 20%
         char[] tmp = new char[newsize];
         System.arraycopy(this.buffer, this.offset, tmp, 0, this.length);
         this.buffer = tmp;
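
As an aside, the arithmetic of the new growth step can be sketched in isolation. The snippet below is a minimal illustration of a buffer that grows in 20% steps using the same integer arithmetic as the hunk above; the class and method names are hypothetical and not part of YaCy's CharBuffer.

// Sketch of a 20%-growth char buffer (hypothetical class, not YaCy code).
final class GrowingCharBuffer {
    private char[] buffer = new char[16];
    private int length = 0;

    private void grow(final int minSize) {
        // grow by 20% over the larger of the current capacity and the requested minimum;
        // integer division rounds down, but the result never falls below minSize
        final int newsize = 12 * Math.max(this.buffer.length, minSize) / 10;
        final char[] tmp = new char[newsize];
        System.arraycopy(this.buffer, 0, tmp, 0, this.length);
        this.buffer = tmp;
    }

    void append(final char c) {
        if (this.length + 1 > this.buffer.length) grow(this.length + 1);
        this.buffer[this.length++] = c;
    }
}

Compared to plain doubling, growing in 20% steps means more array copies while a buffer is still being filled, but the over-allocation at any moment stays around 20% of the live data instead of up to 100%, which matches the memory-saving intent of this commit.
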
@@ -478,15 +478,12 @@ public final class CharBuffer extends Writer {
         this.offset = 0;
     }

-    public void reset(final int newSize) {
-        this.resize(newSize);
-        this.reset();
-    }
-
-    public void resize(final int newSize) {
-        if(newSize < 0) throw new IllegalArgumentException("Illegal array size: " + newSize);
-        final char[] v = new char[newSize];
-        System.arraycopy(this.buffer,0,v,0,newSize > this.buffer.length ? this.buffer.length : newSize);
+    /**
+     * call trimToSize() whenever a CharBuffer is not extended any more and is kept to store the content permanently
+     */
+    public void trimToSize() {
+        final char[] v = new char[this.length];
+        System.arraycopy(this.buffer, this.offset, v, 0, this.length);
         this.buffer = v;
     }
@@ -497,13 +494,15 @@ public final class CharBuffer extends Writer {
     }

     @Override
-    public void close() throws IOException {
+    public void close() {
+        this.length = 0;
+        this.offset = 0;
         this.buffer = null; // assist with garbage collection
     }

     @Override
-    public void flush() throws IOException {
-        // TODO Auto-generated method stub
+    public void flush() {
+        trimToSize();
     }
 }
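
One detail worth spelling out, since it is what lets the earlier TransformerWriter and ContentTransformer hunks drop their try/catch blocks: java.io.Writer.close() declares IOException, but an overriding method may drop a checked exception from its throws clause. Once CharBuffer.close() no longer declares IOException, as in the hunk above, a bare bb.close() compiles wherever the variable's static type is CharBuffer. A minimal sketch of that language rule, using a hypothetical Writer subclass rather than the project's code:

import java.io.IOException;
import java.io.Writer;

// Hypothetical Writer subclass; illustrates dropping "throws IOException" on close().
class QuietBuffer extends Writer {
    private final StringBuilder sb = new StringBuilder();

    @Override
    public void write(final char[] cbuf, final int off, final int len) {
        this.sb.append(cbuf, off, len);
    }

    @Override
    public void flush() {
        // nothing external to flush in this sketch
    }

    // No "throws IOException": callers that see the static type QuietBuffer
    // can call close() without a try/catch; callers holding a plain Writer still need one.
    @Override
    public void close() {
        this.sb.setLength(0); // release the content, mirroring the intent of the hunk above
    }

    public static void main(final String[] args) throws IOException {
        final QuietBuffer qb = new QuietBuffer();
        qb.write("hello".toCharArray(), 0, 5);
        qb.close();        // compiles without try/catch

        final Writer w = new QuietBuffer();
        w.close();         // static type Writer: IOException must still be handled or declared
    }
}

The same reasoning applies to flush(): redefining it without the throws clause and calling trimToSize() turns the previously empty stub into a compaction hook.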