*) adding missing close() calls to avoid the "too many open files" bug

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@282 6c8d7289-2bf4-0310-a012-ef5d649a1542
pull/1/head
theli 20 years ago
parent 9a98988c3c
commit 9e47ba5ad6

@ -276,16 +276,20 @@ public final class httpHeader extends TreeMap implements Map {
// convenience methods for storing and loading to a file system
/**
 * Stores this header to the given file, writing one "key=value\r\n"
 * line per entry.
 *
 * @param f the file to (over)write
 * @throws IOException if the file cannot be created or written
 */
public void store(File f) throws IOException {
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(f);
        Iterator i = keySet().iterator();
        String key, value;
        while (i.hasNext()) {
            key = (String) i.next();
            value = (String) get(key);
            fos.write((key + "=" + value + "\r\n").getBytes());
        }
        fos.flush();
    } finally {
        // always release the file handle, even if writing failed,
        // to avoid the "too many open files" problem; a close failure
        // must not mask an IOException from the write loop above
        if (fos != null) try { fos.close(); } catch (Exception e) {}
    }
}
public String toString() {

@ -1117,8 +1117,8 @@ public final class httpd implements serverHandler {
ByteArrayOutputStream o = new ByteArrayOutputStream();
fis = new FileInputStream(file);
httpTemplate.writeTemplate(fis, o, tp, "-UNRESOLVED_PATTERN-".getBytes());
o.close();
result = o.toByteArray();
o.close(); o = null;
httpHeader header = new httpHeader();
header.put(httpHeader.DATE, httpc.dateString(httpc.nowDate()));

@ -455,7 +455,7 @@ public final class httpdFileHandler extends httpdAbstractHandler implements http
}
} finally {
if (zippedOut != null) try {zippedOut.close();} catch(Exception e) {}
if (o != null) try {o.close();} catch(Exception e) {}
if (o != null) try {o.close(); o = null;} catch(Exception e) {}
if (fis != null) try {fis.close();} catch(Exception e) {}
}

@ -350,30 +350,41 @@ public class kelondroDyn extends kelondroTree {
}
/**
 * Reads a file from the file system and writes its complete content
 * into the database under the given key.
 *
 * @param key database key to store the file content under
 * @param f   source file; its whole content is loaded into memory
 * @throws IOException if the file cannot be read or the DB write fails
 */
public synchronized void writeFile(String key, File f) throws IOException {
    kelondroRA kra = null;
    FileInputStream fis = null;
    try {
        kra = getRA(key);
        byte[] buffer = new byte[1024];
        // the result array is pre-sized to the file length; the read
        // loop below copies each chunk into it sequentially
        byte[] result = new byte[(int) f.length()];
        fis = new FileInputStream(f);
        int i;
        int pos = 0;
        while ((i = fis.read(buffer)) > 0) {
            System.arraycopy(buffer, 0, result, pos, i);
            pos += i;
        }
        kra.writeArray(result);
    } finally {
        // close both resources unconditionally so neither an IO error
        // nor a DB error leaks a file handle
        if (fis != null) try { fis.close(); } catch (Exception e) {}
        if (kra != null) try { kra.close(); } catch (Exception e) {}
    }
}
/**
 * Reads an entry from the database and writes it as a file to the
 * file system.
 *
 * @param key database key whose content is exported
 * @param f   destination file; it is created or overwritten
 * @throws IOException if the DB read or the file write fails
 */
public synchronized void readFile(String key, File f) throws IOException {
    kelondroRA kra = null;
    FileOutputStream fos = null;
    try {
        kra = getRA(key);
        byte[] result = kra.readArray();
        fos = new FileOutputStream(f);
        fos.write(result);
    } finally {
        // close both resources unconditionally to avoid handle leaks
        if (fos != null) try { fos.close(); } catch (Exception e) {}
        if (kra != null) try { kra.close(); } catch (Exception e) {}
    }
}
public static void main(String[] args) {

@ -115,10 +115,12 @@ public abstract class AbstractParser implements Parser{
BufferedInputStream contentInputStream = null;
try {
contentInputStream = new BufferedInputStream(new FileInputStream(sourceFile));
return this.parse(location, mimeType, contentInputStream);
} catch (FileNotFoundException e) {
e.printStackTrace();
throw new ParserException(e.getMessage());
} finally {
if (contentInputStream != null) try{contentInputStream.close();}catch(Exception e){}
}
return this.parse(location, mimeType, contentInputStream);
}
/**

@ -283,9 +283,13 @@ public final class plasmaCrawlWorker extends Thread {
} else if ((profile == null) || ((profile.storeHTCache()) && ((error = htCache.shallStoreCache()) == null))) {
// we write the new cache entry to file system directly
cacheFile.getParentFile().mkdirs();
FileOutputStream fos = new FileOutputStream(cacheFile);
htCache.cacheArray = res.writeContent(fos); // writes in cacheArray and cache file
fos.close();
FileOutputStream fos = null;
try {
fos = new FileOutputStream(cacheFile);
htCache.cacheArray = res.writeContent(fos); // writes in cacheArray and cache file
} finally {
if (fos!=null)try{fos.close();}catch(Exception e){}
}
htCache.status = plasmaHTCache.CACHE_FILL;
} else {
if (error != null) log.logDebug("CRAWLER NOT STORED RESOURCE " + url.toString() + ": " + error);

@ -45,21 +45,18 @@
package de.anomic.plasma;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStream;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
@ -264,11 +261,14 @@ public final class plasmaParser {
/**
 * Looks up the mime type for a file extension from the "httpd.mime"
 * properties file in the working directory.
 *
 * @param fileExt the file extension to look up (used as property key)
 * @return the configured mime type, or "application/octet-stream" if
 *         the extension is unknown or the mime file cannot be read
 */
public static String getMimeTypeByFileExt(String fileExt) {
    Properties prop = new Properties();
    BufferedInputStream bufferedIn = null;
    try {
        bufferedIn = new BufferedInputStream(new FileInputStream(new File("httpd.mime")));
        prop.load(bufferedIn);
    } catch (IOException e) {
        // missing mime table is not fatal; the default below still applies
        System.err.println("ERROR: httpd.mime not found in settings path");
    } finally {
        if (bufferedIn != null) try { bufferedIn.close(); } catch (Exception e) {}
    }
    return prop.getProperty(fileExt, "application/octet-stream");
}
@ -342,11 +342,14 @@ public final class plasmaParser {
private static void loadEnabledParserList() {
// loading a list of availabe parser from file
Properties prop = new Properties();
BufferedInputStream bufferedIn = null;
try {
prop.load(new FileInputStream(new File("yacy.parser")));
prop.load(bufferedIn = new BufferedInputStream(new FileInputStream(new File("yacy.parser"))));
} catch (IOException e) {
System.err.println("ERROR: yacy.parser not found in settings path");
}
} finally {
if (bufferedIn != null) try{ bufferedIn.close(); }catch(Exception e){}
}
// enable them ...
setEnabledParserList(prop.keySet());

@ -45,37 +45,46 @@ public class plasmaStore {
// some static helper methods
/**
 * Writes the given content gzip-compressed to the given file,
 * creating parent directories as needed.
 *
 * @param f       destination file (created or overwritten)
 * @param content raw bytes to compress and store
 * @throws IOException if the file cannot be created or written
 */
public static void saveGzip(File f, byte[] content) throws IOException {
    java.util.zip.GZIPOutputStream gzipout = null;
    try {
        f.getParentFile().mkdirs();
        gzipout = new java.util.zip.GZIPOutputStream(new FileOutputStream(f));
        gzipout.write(content, 0, content.length);
    } finally {
        // closing also flushes the gzip trailer; guard against a leak
        // if the write above throws
        if (gzipout != null) try { gzipout.close(); } catch (Exception e) {}
    }
}
/**
 * Reads a gzip-compressed file and returns its decompressed content.
 *
 * @param f the gzip file to read
 * @return the decompressed bytes, sized exactly to the content length
 * @throws IOException if the file cannot be opened or decompressed
 */
public static byte[] loadGzip(File f) throws IOException {
    java.util.zip.GZIPInputStream gzipin = null;
    try {
        gzipin = new java.util.zip.GZIPInputStream(new FileInputStream(f));
        // result grows by doubling; len tracks how much of it is used
        byte[] result = new byte[1024];
        byte[] buffer = new byte[512];
        byte[] b;
        int len = 0;
        int last;
        while ((last = gzipin.read(buffer, 0, buffer.length)) > 0) {
            while (result.length - len < last) {
                // the result array is too small, double its space
                b = new byte[result.length * 2];
                System.arraycopy(result, 0, b, 0, len);
                result = b;
            }
            // append the last read chunk
            System.arraycopy(buffer, 0, result, len, last);
            len += last;
        }
        // finished with reading; cut the result to the exact size
        b = new byte[len];
        System.arraycopy(result, 0, b, 0, len);
        return b;
    } finally {
        // single close point; also covers the early-throw cases
        if (gzipin != null) try { gzipin.close(); } catch (Exception e) {}
    }
}
/* public static void saveProperties(File f, Properties props, String comment) throws IOException {

@ -420,16 +420,21 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
/**
 * Loads a word list from a text file into a sorted set. Empty lines
 * and lines starting with '#' are skipped; entries are lower-cased.
 *
 * @param file the list file; a missing file yields an empty set
 * @return the (possibly empty) set of lower-cased entries
 */
private static TreeSet loadList(File file) {
    TreeSet list = new TreeSet(kelondroMSetTools.fastStringComparator);
    if (!(file.exists())) return list;
    BufferedReader br = null;
    try {
        br = new BufferedReader(new InputStreamReader(new FileInputStream(file)));
        String line;
        while ((line = br.readLine()) != null) {
            line = line.trim();
            // line is already trimmed here, so only lower-casing remains
            if ((line.length() > 0) && (!(line.startsWith("#")))) list.add(line.toLowerCase());
        }
    } catch (IOException e) {
        // best effort: an unreadable file yields whatever was read so far
    } finally {
        if (br != null) try { br.close(); } catch (Exception e) {}
    }
    return list;
}
public void close() {

@ -40,6 +40,8 @@
package de.anomic.server;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
@ -131,31 +133,38 @@ public abstract class serverAbstractSwitch implements serverSwitch {
}
/**
 * Loads a properties file into a map.
 *
 * @param f the properties file to load
 * @return the loaded properties (as Hashtable), or null if the file
 *         could not be read
 */
public static Map loadHashMap(File f) {
    // load props
    Properties prop = new Properties();
    BufferedInputStream bufferedIn = null;
    try {
        bufferedIn = new BufferedInputStream(new FileInputStream(f));
        prop.load(bufferedIn);
    } catch (IOException e1) {
        System.err.println("ERROR: " + f.toString() + " not found in settings path");
        // NOTE(review): callers receive null on failure — confirm they check for it
        prop = null;
    } finally {
        if (bufferedIn != null) try { bufferedIn.close(); } catch (Exception e) {}
    }
    // Properties extends Hashtable, so this cast is safe
    return (Hashtable) prop;
}
/**
 * Saves a map of string key/value pairs to a file in properties-like
 * "key=value" format, with a leading comment line and an "# EOF" marker.
 * Newlines inside values are escaped as "\n".
 *
 * @param f       destination file (created or overwritten)
 * @param props   map of String keys to String values
 * @param comment comment text written as the first line
 * @throws IOException if the file cannot be created
 */
public static void saveMap(File f, Map props, String comment) throws IOException {
    PrintWriter pw = null;
    try {
        pw = new PrintWriter(new BufferedOutputStream(new FileOutputStream(f)));
        pw.println("# " + comment);
        Iterator i = props.entrySet().iterator();
        String key, value;
        Map.Entry entry;
        while (i.hasNext()) {
            entry = (Map.Entry) i.next();
            key = (String) entry.getKey();
            // escape literal newlines so each entry stays on one line
            value = ((String) entry.getValue()).replaceAll("\n", "\\\\n");
            pw.println(key + "=" + value);
        }
        pw.println("# EOF");
    } finally {
        // PrintWriter.close flushes; guard against handle leaks on error
        if (pw != null) try { pw.close(); } catch (Exception e) {}
    }
}
public void setConfig(String key, long value) {

@ -1,5 +1,6 @@
package de.anomic.yacy.seedUpload;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
@ -116,12 +117,17 @@ class sshc {
checkAck(in);
// send a content of lfile
FileInputStream fis=new FileInputStream(localFile);
byte[] buf=new byte[1024];
while(true){
int len=fis.read(buf, 0, buf.length);
if(len<=0) break;
out.write(buf, 0, len); out.flush();
BufferedInputStream bufferedIn = null;
try {
bufferedIn=new BufferedInputStream(new FileInputStream(localFile));
while(true){
int len=bufferedIn.read(buf, 0, buf.length);
if(len<=0) break;
out.write(buf, 0, len); out.flush();
}
} finally {
if (bufferedIn != null) try{bufferedIn.close();}catch(Exception e){}
}
// send '\0'

@ -183,27 +183,31 @@ public class yacyPeerActions {
}
private disorderSet loadSuperseed(File local, String url) {
// this returns a list of locations where seed list-files can be found
disorderSet supsee = new disorderSet();
String line;
// read in local file
// this returns a list of locations where seed list-files can be found
disorderSet supsee = new disorderSet();
String line;
// read in local file
int lc = 0;
BufferedReader br = null;
try {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(local)));
while ((line = br.readLine()) != null) {
line = line.trim();
//System.out.println("one line in file:" + line);
if (line.length() > 0) supsee.add(line);
}
br.close();
br = new BufferedReader(new InputStreamReader(new FileInputStream(local)));
while ((line = br.readLine()) != null) {
line = line.trim();
//System.out.println("one line in file:" + line);
if (line.length() > 0) supsee.add(line);
}
br.close();
lc = supsee.size();
yacyCore.log.logInfo("BOOTSTRAP: " + lc + " seed-list urls from superseed file " + local.toString());
} catch (IOException e) {
//e.printStackTrace();
supsee = new disorderSet();
} catch (IOException e) {
//e.printStackTrace();
supsee = new disorderSet();
yacyCore.log.logInfo("BOOTSTRAP: failed to load seed-list urls from superseed file " + local.toString() + ": " + e.getMessage());
}
// read in remote file from url
} finally {
if (br!=null)try{br.close();}catch(Exception e){}
}
// read in remote file from url
try {
Vector remote = httpc.wget(new URL(url), 5000, null, null, sb.remoteProxyHost, sb.remoteProxyPort);
if ((remote != null) && (remote.size() > 0)) {

Loading…
Cancel
Save