changed release strategy:

we will provide two different releases in the future: a standard release and a 'pro' release.
The 'pro' release contains all additional parsers AND has different default performance values.
The pro version therefore differs from the previous 'all' version only by these default values.
The pro configuration is chosen automatically if the libx folder exists. Once an installation has been initialized, its configuration stays as it is, independent of whether a libx folder exists (see the sketch below).
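For illustration, here is a minimal, self-contained sketch of that selection and overloading logic, written in modern Java for brevity (the project itself targets Java 1.4). The class and method names are hypothetical; the libx check and the '__pro' key handling mirror the changes to yacy.java and serverAbstractSwitch.java in the diff below.

    import java.io.File;
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    // Hypothetical helper; the real logic lives in yacy.java and serverAbstractSwitch.java.
    public class ProDefaultsSketch {

        // pro mode is chosen automatically when the libx folder exists
        static boolean isPro(String homePath) {
            return new File(homePath, "libx").exists();
        }

        // in pro mode, overload each standard setting with its '__pro' counterpart;
        // the '__pro' keys themselves are dropped in both modes
        static void applyProDefaults(Map<String, String> initProps, boolean applyPro) {
            if (applyPro) {
                // iterate over a copy to avoid concurrent modification
                for (Map.Entry<String, String> e : new HashMap<String, String>(initProps).entrySet()) {
                    if (e.getKey().endsWith("__pro")) {
                        initProps.put(e.getKey().substring(0, e.getKey().length() - 5), e.getValue());
                    }
                }
            }
            Iterator<String> i = initProps.keySet().iterator();
            while (i.hasNext()) {
                if (i.next().endsWith("__pro")) i.remove();
            }
        }

        public static void main(String[] args) {
            Map<String, String> props = new HashMap<String, String>();
            props.put("indexer.slots", "40");
            props.put("indexer.slots__pro", "80");
            applyProDefaults(props, isPro("."));
            // prints {indexer.slots=80} when ./libx exists, otherwise {indexer.slots=40}
            System.out.println(props);
        }
    }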
The ant targets have been changed. There are now three different targets: two to create the standard and pro releases, and one to upgrade (typical invocations follow the list):
- dist: creates a standard release (main sources only; no libx target any more)
- distPro: creates a pro release (includes the libx)
- distExt: creates a libx release that contains only the libx folder; it can be used to upgrade from standard to pro
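Assuming the build is driven with Apache Ant from the project root, the three targets would typically be invoked like this:

    ant dist       # standard release tarball (no libx)
    ant distPro    # pro release tarball (libx included)
    ant distExt    # libx-only tarball, for upgrading a standard installation to pro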
Furthermore, the 'dev' naming of releases has been removed.

git-svn-id: https://svn.berlios.de/svnroot/repos/yacy/trunk@3902 6c8d7289-2bf4-0310-a012-ef5d649a1542
orbiter 18 years ago
parent d8ab7a4710
commit 6518bb6c08

@ -3,11 +3,9 @@ javacSource=1.4
javacTarget=1.4
# Release Configuration
releaseVersion=0.525
releaseFile=yacy_dev_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
#releaseFile=yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
releaseDir=yacy_dev_v${releaseVersion}_${DSTAMP}_${releaseNr}
#releaseDir=yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}
releaseVersion=0.526
releaseFile=yacy_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
proReleaseFile=yacy_pro_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
releaseFileParentDir=yacy
releaseNr=$Revision$
@ -18,7 +16,4 @@ accessRightsFile=644
# Extension Configuration: singleZip|multiZip|copyToMain
extensionMode=singleZip
extensionFile=yacy_libx_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
# All in one file configuration
allInOneFile=yacy_all_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz
extensionFile=yacy_libx_v${releaseVersion}_${DSTAMP}_${releaseNr}.tar.gz

@ -174,8 +174,9 @@
<!--<mkdir dir="${doc}"/>-->
<mkdir dir="${data}"/>
<mkdir dir="${release}"/>
<mkdir dir="${release}"/>
<!-- YaCy Release Date -->
<filter token="REPL_DATE" value="${DSTAMP}"/>
<filter token="REPL_RELEASE" value="${releaseFile}"/>
@ -194,6 +195,7 @@
<delete file="${build}/yacy.java" failonerror="false"/>
<copy file="${src}/yacy.java" tofile="${build}/yacy.java" overwrite="true" filtering="true" />
<!--<copy file="${doc}/Download.html" tofile="${doc}/Download.html.up" filtering="true" />-->
</target>
<target name="compileMain" depends="init" description="Compiling the yacy sources ...">
@ -273,7 +275,6 @@
<property name="lib" location="${lib}"/>
<property name="libx" location="${libx}"/>
<property name="release" location="${release_ext}"/>
<property name="releaseDir" value="${releaseDir}"/>
<property name="javacSource" value="${javacSource}"/>
<property name="javacTarget" value="${javacTarget}"/>
<!-- each optional parser must have its own build file -->
@ -289,7 +290,6 @@
<property name="libx" location="${libx}"/>
<property name="htroot" value="${htroot}"/>
<property name="release" location="${release_ext}"/>
<property name="releaseDir" value="${releaseDir}"/>
<property name="javacSource" value="${javacSource}"/>
<property name="javacTarget" value="${javacTarget}"/>
<!-- each optionl uploader module must have its own build file -->
@ -305,7 +305,6 @@
<property name="libx" location="${libx}"/>
<property name="htroot" value="${htroot}"/>
<property name="release" location="${release_ext}"/>
<property name="releaseDir" value="${releaseDir}"/>
<property name="javacSource" value="${javacSource}"/>
<property name="javacTarget" value="${javacTarget}"/>
<fileset dir="${src}/" includes="de/anomic/soap/build.xml"/>
@ -319,7 +318,6 @@
<property name="libx" location="${libx}"/>
<property name="htroot" value="${htroot}"/>
<property name="release" location="${release_ext}"/>
<property name="releaseDir" value="${releaseDir}"/>
<property name="javacSource" value="${javacSource}"/>
<property name="javacTarget" value="${javacTarget}"/>
<fileset dir="${src}/" includes="de/anomic/soap/build.xml"/>
@ -392,7 +390,7 @@
<!-- =======================================================================================================
putting all extensions together into a single zip file
======================================================================================================= -->
<target name="distExtensions" if="singleExtFile" depends="copyExt4Dist">
<target name="distExt" if="singleExtFile" depends="copyExt4Dist">
<tar destfile="${release}/${extensionFile}" compression="gzip" defaultexcludes="yes" longfile="gnu">
<tarfileset dir="${release_ext}" prefix="${releaseFileParentDir}/" dirmode="${accessRightsDir}" mode="${accessRightsFile}" >
<include name="**/*.*"/>
@ -581,7 +579,7 @@
<!-- =======================================================================================================
making a release file for yacy
======================================================================================================= -->
<target name="distMain" depends="copyMain4Dist" description="Compiling sources and make a release file ...">
<target name="dist" depends="copyMain4Dist" description="Compiling sources and make a release file ...">
<tar destfile="${release}/${releaseFile}" compression="gzip" defaultexcludes="yes" longfile="fail">
@ -604,8 +602,8 @@
<!--<echo message="${releaseVersion}" file="${doc}/release.txt"/>-->
</target>
<target name="distAllInOne" depends="compileExtensions,copyMain4Dist,copyExt4Dist">
<tar destfile="${release}/${allInOneFile}" compression="gzip" defaultexcludes="yes" longfile="fail">
<target name="distPro" depends="compileExtensions,copyMain4Dist,copyExt4Dist">
<tar destfile="${release}/${proReleaseFile}" compression="gzip" defaultexcludes="yes" longfile="fail">
<!-- packing all files into a gzipped tar -->
<tarfileset dir="${release_main}" prefix="${releaseFileParentDir}/" dirmode="${accessRightsDir}" mode="${accessRightsExecutable}">
@ -628,10 +626,6 @@
<delete dir="${release}/WINDOWS" failonerror="false" />
<delete dir="${release}/SPECS" failonerror="false" />
</target>
<!-- make dist file including main sources and extensions -->
<target name="dist" depends="distMain,distExtensions">
</target>
<!-- make clean -->
<target name="clean" description="make clean">
@ -813,7 +807,7 @@ This needs nsis-ant-1.2.jar in the yacyfolder.
-->
<target name="distWithWinStartExe" depends="makeWinStartExe,distAllInOne">
<target name="distWithWinStartExe" depends="makeWinStartExe,distPro">
</target>
<target name="rpm" depends="all" description="Creates the main rpm-package and another for libx">

@ -82,36 +82,36 @@ public class AccessTracker_p {
TreeMap access;
Map.Entry entry;
if (host.length() > 0) {
access = switchboard.accessTrack(host);
if (access != null) {
try {
Iterator ii = access.entrySet().iterator();
while (ii.hasNext()) {
entry = (Map.Entry) ii.next();
prop.put("page_list_" + entCount + "_host", host);
prop.put("page_list_" + entCount + "_date", yacyCore.universalDateShortString(new Date(((Long) entry.getKey()).longValue())));
prop.put("page_list_" + entCount + "_path", (String) entry.getValue());
entCount++;
}} catch (ConcurrentModificationException e) {} // we dont want to synchronize this
}
} else {
Iterator i = switchboard.accessHosts();
while ((entCount < maxCount) && (i.hasNext())) {
host = (String) i.next();
access = switchboard.accessTrack(host);
try {
Iterator ii = access.entrySet().iterator();
while (ii.hasNext()) {
entry = (Map.Entry) ii.next();
prop.put("page_list_" + entCount + "_host", host);
prop.put("page_list_" + entCount + "_date", yacyCore.universalDateShortString(new Date(((Long) entry.getKey()).longValue())));
prop.put("page_list_" + entCount + "_path", (String) entry.getValue());
entCount++;
}} catch (ConcurrentModificationException e) {} // we dont want to synchronize this
}
}
access = switchboard.accessTrack(host);
if (access != null) {
try {
Iterator ii = access.entrySet().iterator();
while (ii.hasNext()) {
entry = (Map.Entry) ii.next();
prop.put("page_list_" + entCount + "_host", host);
prop.put("page_list_" + entCount + "_date", yacyCore.universalDateShortString(new Date(((Long) entry.getKey()).longValue())));
prop.put("page_list_" + entCount + "_path", (String) entry.getValue());
entCount++;
}
} catch (ConcurrentModificationException e) {} // we dont want to synchronize this
}
} else {
try {
Iterator i = switchboard.accessHosts();
while ((entCount < maxCount) && (i.hasNext())) {
host = (String) i.next();
access = switchboard.accessTrack(host);
Iterator ii = access.entrySet().iterator();
while (ii.hasNext()) {
entry = (Map.Entry) ii.next();
prop.put("page_list_" + entCount + "_host", host);
prop.put("page_list_" + entCount + "_date", yacyCore.universalDateShortString(new Date(((Long) entry.getKey()).longValue())));
prop.put("page_list_" + entCount + "_path", (String) entry.getValue());
entCount++;
}
}
} catch (ConcurrentModificationException e) {} // we dont want to synchronize this
}
prop.put("page_list", entCount);
prop.put("page_num", entCount);
}

@ -48,7 +48,6 @@
// if the shell's current path is HTROOT
import java.io.File;
import java.lang.reflect.Method;
import java.lang.Integer;
import java.util.regex.Pattern;
@ -63,7 +62,6 @@ import de.anomic.server.serverCore;
import de.anomic.server.serverInstantThread;
import de.anomic.server.serverObjects;
import de.anomic.server.serverSwitch;
import de.anomic.server.portForwarding.serverPortForwarding;
import de.anomic.yacy.yacyCore;
import de.anomic.yacy.yacySeed;
@ -241,7 +239,7 @@ public class ConfigBasic {
}
return prop;
}
/*
private boolean findUPnPRouter(int timeout) {
// determine if the upnp port forwarding class is available and load it dynamically
@ -256,8 +254,8 @@ public class ConfigBasic {
if ((result != null)&&(result instanceof Boolean)) {
return ((Boolean)result).booleanValue();
}
} catch (Exception e) {/* ignore this error */
} catch (Error e) {/* ignore this error */}
} catch (Exception e) { // ignore this error
} catch (Error e) {} // ignore this error
return false;
}
@ -274,12 +272,12 @@ public class ConfigBasic {
// trying to get the proper method for router scanning
scanForRouter = upnp.getClass().getMethod("routerAvailable", new Class[] {int.class});
} catch (Exception e) {/* ignore this error */
} catch (Error e) {/* ignore this error */}
} catch (Exception e) { // ignore this error
} catch (Error e) {} // ignore this error
return new Object[]{upnp,scanForRouter};
}
*/
private void reinitPortForwarding(serverObjects post, serverSwitch env) {
if ((post != null)) {
try {

@ -50,8 +50,8 @@ public final class httpdSwitchboard extends serverAbstractSwitch implements serv
private final LinkedList cacheStack;
public httpdSwitchboard(String rootPath, String initPath, String configPath) {
super(rootPath, initPath, configPath);
public httpdSwitchboard(String rootPath, String initPath, String configPath, boolean applyPro) {
super(rootPath, initPath, configPath, applyPro);
cacheStack = new LinkedList();
}

@ -167,7 +167,8 @@ public class kelondroFlexWidthArray implements kelondroArray {
public static void delete(File path, String tablename) {
File tabledir = new File(path, tablename);
if ((tabledir.exists()) && (!(tabledir.isDirectory()))) {
if (!(tabledir.exists())) return;
if ((!(tabledir.isDirectory()))) {
tabledir.delete();
return;
}

@ -57,9 +57,10 @@ public class plasmaCrawlZURL {
private kelondroIndex urlIndexFile = null;
private LinkedList rejectedStack = new LinkedList(); // strings: url
public plasmaCrawlZURL(File cachePath, String tablename) {
public plasmaCrawlZURL(File cachePath, String tablename, boolean startWithEmptyFile) {
// creates a new ZURL in a file
cachePath.mkdirs();
if (startWithEmptyFile) kelondroFlexTable.delete(cachePath, tablename);
urlIndexFile = new kelondroFlexTable(cachePath, tablename, -1, rowdef, true);
}

@ -100,18 +100,18 @@ public final class plasmaParser {
* A list containing all installed parsers and the mimeType that they support
* @see #loadAvailableParserList()
*/
static final Properties availableParserList = new Properties();
public static final Properties availableParserList = new Properties();
/**
* A list of file extensions that are supported by the html-parser and can
* be parsed in realtime.
*/
static final HashSet supportedRealtimeFileExt = new HashSet();
public static final HashSet supportedRealtimeFileExt = new HashSet();
/**
* A list of mimeTypes that can be parsed in Realtime (on the fly)
*/
static final HashSet realtimeParsableMimeTypes = new HashSet();
public static final HashSet realtimeParsableMimeTypes = new HashSet();
private static final Properties mimeTypeLookupByFileExt = new Properties();
static {

@ -886,13 +886,15 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
private static plasmaSwitchboard sb;
public plasmaSwitchboard(String rootPath, String initPath, String configPath) {
super(rootPath, initPath, configPath);
public plasmaSwitchboard(String rootPath, String initPath, String configPath, boolean applyPro) {
super(rootPath, initPath, configPath, applyPro);
sb=this;
// set loglevel and log
setLog(new serverLog("PLASMA"));
if (applyPro) this.log.logInfo("This is the pro-version of YaCy");
// load values from configs
this.plasmaPath = new File(rootPath, getConfig(DBPATH, DBPATH_DEFAULT));
this.log.logConfig("Plasma DB Path: " + this.plasmaPath.toString());
@ -1067,9 +1069,9 @@ public final class plasmaSwitchboard extends serverAbstractSwitch implements ser
// start indexing management
log.logConfig("Starting Indexing Management");
noticeURL = new plasmaCrawlNURL(plasmaPath);
errorURL = new plasmaCrawlZURL(); // fresh error DB each startup; can be hold in RAM and reduces IO;
//errorURL = new plasmaCrawlZURL(plasmaPath, "urlError.db");
delegatedURL = new plasmaCrawlZURL(plasmaPath, "urlDelegated.db");
//errorURL = new plasmaCrawlZURL(); // fresh error DB each startup; can be hold in RAM and reduces IO;
errorURL = new plasmaCrawlZURL(plasmaPath, "urlError.db", true);
delegatedURL = new plasmaCrawlZURL(plasmaPath, "urlDelegated.db", false);
wordIndex = new plasmaWordIndex(indexPrimaryPath, indexSecondaryPath, ramRWI_time, log);
// set a high maximum cache size to current size; this is adopted later automatically

@ -69,14 +69,14 @@ public abstract class serverAbstractSwitch implements serverSwitch {
protected HashMap accessTracker; // mappings from requesting host to an ArrayList of serverTrack-entries
protected long maxTrackingTime;
public serverAbstractSwitch(String rootPath, String initPath, String configPath) {
public serverAbstractSwitch(String rootPath, String initPath, String configPath, boolean applyPro) {
// we initialize the switchboard with a property file,
// but maintain these properties then later in a new 'config' file
// to reset all changed configs, the config file must
// be deleted, but not the init file
// the only attribute that will always be read from the init is the
// file name of the config file
this.rootPath = rootPath;
this.rootPath = rootPath;
configComment = "This is an automatically generated file, updated by serverAbstractSwitch and initialized by " + initPath;
File initFile = new File(rootPath, initPath);
configFile = new File(rootPath, configPath); // propertiesFile(config);
@ -89,7 +89,28 @@ public abstract class serverAbstractSwitch implements serverSwitch {
initProps = serverFileUtils.loadHashMap(initFile);
else
initProps = new HashMap();
// if 'pro'-version is selected, overload standard settings with 'pro'-settings
Iterator i;
String prop;
if (applyPro) {
i = new HashMap(initProps).keySet().iterator(); // clone the map to avoid concurrent modification exceptions
while (i.hasNext()) {
prop = (String) i.next();
if (prop.endsWith("__pro")) {
initProps.put(prop.substring(0, prop.length() - 5), initProps.get(prop));
}
}
}
// delete the 'pro' init settings
i = initProps.keySet().iterator();
while (i.hasNext()) {
prop = (String) i.next();
if (prop.endsWith("__pro")) {
i.remove();
}
}
// load config's from last save
if (configFile.exists())
configProps = serverFileUtils.loadHashMap(configFile);
@ -99,7 +120,7 @@ public abstract class serverAbstractSwitch implements serverSwitch {
synchronized (configProps) {
// remove all values from config that do not appear in init
// (out-dated settings)
Iterator i = configProps.keySet().iterator();
i = configProps.keySet().iterator();
String key;
while (i.hasNext()) {
key = (String) i.next();

@ -28,8 +28,8 @@ package de.anomic.server;
public class serverPlainSwitch extends serverAbstractSwitch implements serverSwitch {
public serverPlainSwitch(String rootPath, String initPath, String configPath) {
super(rootPath, initPath, configPath);
public serverPlainSwitch(String rootPath, String initPath, String configPath, boolean applyPro) {
super(rootPath, initPath, configPath, applyPro);
}
public int queueSize() {

@ -80,7 +80,7 @@ public class BlacklistService extends AbstractService {
private static final String LIST_MANAGER_LISTS_PATH = "listManager.listsPath";
private static final String BLACKLISTS = ".BlackLists";
private static final String BLACKLISTS_TYPES = "BlackLists.types";
//private static final String BLACKLISTS_TYPES = "BlackLists.types";
private final static String BLACKLIST_SHARED = "BlackLists.Shared";
/* =====================================================================

@ -1196,7 +1196,7 @@ public final class yacyClient {
public static void main(String[] args) {
System.out.println("yacyClient Test");
try {
final plasmaSwitchboard sb = new plasmaSwitchboard(args[0], "httpProxy.init", "DATA/SETTINGS/httpProxy.conf");
final plasmaSwitchboard sb = new plasmaSwitchboard(args[0], "httpProxy.init", "DATA/SETTINGS/httpProxy.conf", false);
/*final yacyCore core =*/ new yacyCore(sb);
yacyCore.peerActions.loadSeedLists();
final yacySeed target = yacyCore.seedDB.getConnected(args[1]);

@ -279,8 +279,8 @@ public final class yacy {
System.exit(1);
}
*/
sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf");
boolean pro = new File(homePath, "libx").exists();
sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf", pro);
sbSync.V(); // signal that the sb reference was set
// save information about available memory at startup time
@ -857,7 +857,7 @@ public final class yacy {
File root = new File(homePath);
try {
final plasmaSwitchboard sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf");
final plasmaSwitchboard sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf", false);
HashMap doms = new HashMap();
System.out.println("Started domain list extraction from " + sb.wordIndex.loadedURL.size() + " url entries.");
System.out.println("a dump will be written after double-check of all extracted domains.");
@ -973,7 +973,7 @@ public final class yacy {
private static void urllist(String homePath, String source, boolean html, String targetName) {
File root = new File(homePath);
try {
final plasmaSwitchboard sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf");
final plasmaSwitchboard sb = new plasmaSwitchboard(homePath, "yacy.init", "DATA/SETTINGS/httpProxy.conf", false);
File file = new File(root, targetName);
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file));

@ -183,7 +183,8 @@ mimeConfig = httpd.mime
proxyCache = DATA/HTCACHE
# the maximum disc cache size for files in proxyCache in megabytes
proxyCacheSize = 200
proxyCacheSize = 100
proxyCacheSize__pro = 600
# use the mostly direct mapping of URLs to Filenames
# makes it easy watching the content of the cache using file browsers
@ -263,7 +264,6 @@ search.BlackLists=url.default.black
surftips.BlackLists=url.default.black
news.BlackLists=url.default.black
proxyCookieBlackList=cookie.default.black
proxyCookieWhiteList=cookie.default.black
@ -465,8 +465,8 @@ proxyIndexingLocalMedia=true
# The crawling works the same way as the prefetch, but it is possible to
# assign a different crawling depth.
# Be careful with this number. Consider a branching factor of average 20;
# A prefect-depth of 8 would index 25.600.000.000 pages, maybe the whole WWW.
crawlingDepth=2
# A prefetch-depth of 8 would index 25.600.000.000 pages, maybe the whole WWW.
crawlingDepth=3
crawlingIfOlder=525600
crawlingDomFilterDepth=-1
crawlingDomMaxPages=-1
@ -517,7 +517,7 @@ filterOutStopwordsFromTopwords=true
# and another idlesleep is performed
20_dhtdistribution_idlesleep=30000
20_dhtdistribution_busysleep=10000
20_dhtdistribution_memprereq=4194304
20_dhtdistribution_memprereq=6291456
30_peerping_idlesleep=120000
30_peerping_busysleep=120000
30_peerping_memprereq=1048576
@ -526,6 +526,7 @@ filterOutStopwordsFromTopwords=true
40_peerseedcycle_memprereq=2097152
50_localcrawl_idlesleep=2000
50_localcrawl_busysleep=250
50_localcrawl_busysleep__pro=100
50_localcrawl_memprereq=4194304
50_localcrawl_isPaused=false
61_globalcrawltrigger_idlesleep=10000
@ -534,16 +535,18 @@ filterOutStopwordsFromTopwords=true
61_globalcrawltrigger_isPaused=false
62_remotetriggeredcrawl_idlesleep=10000
62_remotetriggeredcrawl_busysleep=1000
62_remotetriggeredcrawl_memprereq=4194304
62_remotetriggeredcrawl_memprereq=6291456
62_remotetriggeredcrawl_isPaused=false
70_cachemanager_idlesleep=1000
70_cachemanager_busysleep=0
70_cachemanager_memprereq=1048576
80_indexing_idlesleep=1000
80_indexing_busysleep=125
80_indexing_busysleep=200
80_indexing_busysleep__pro=10
80_indexing_memprereq=6291456
82_crawlstack_idlesleep=5000
82_crawlstack_busysleep=10
82_crawlstack_busysleep=50
82_crawlstack_busysleep__pro=10
82_crawlstack_memprereq=1048576
90_cleanup_idlesleep=300000
90_cleanup_busysleep=300000
@ -558,7 +561,7 @@ filterOutStopwordsFromTopwords=true
# a cluster value of '2' would be appropriate
80_indexing_cluster=1
# ram cache for database files
# ram cache init timeout for database files
# ram cache for collection index
ramCacheRWI_time = 30000
@ -608,6 +611,8 @@ ramCacheProfiles_time= 500
# othervise the YaCy-internal memory supervision does not work
javastart_Xmx=Xmx96m
javastart_Xms=Xms96m
javastart_Xmx__pro=Xmx512m
javastart_Xms__pro=Xms512m
# priority of the yacy-process
# is valid in unix/shell and windows environments but
@ -623,6 +628,9 @@ javastart_priority=0
wordCacheMaxCount = 20000
wordCacheInitCount = 30000
wordFlushSize = 500;
wordCacheMaxCount__pro = 60000
wordCacheInitCount__pro = 80000
wordFlushSize__pro = 1000;
# Specifies if yacy can be used as transparent http proxy.
#
@ -682,10 +690,12 @@ crawler.clientTimeout=9000
crawler.http.acceptEncoding=gzip
crawler.http.acceptLanguage=en-us,en;q=0.5
crawler.http.acceptCharset=ISO-8859-1,utf-8;q=0.7,*;q=0.7
crawler.http.maxFileSize=262144
crawler.http.maxFileSize=131072
crawler.http.maxFileSize__pro=524288
# ftp crawler specific settings
crawler.ftp.maxFileSize=262144
crawler.ftp.maxFileSize=131072
crawler.ftp.maxFileSize__pro=524288
# maximum number of crawler threads
crawler.MaxActiveThreads = 30
@ -697,11 +707,13 @@ stacker.MaxIdleThreads = 10
stacker.MinIdleThreads = 5
# maximum size of indexing queue
indexer.slots = 60
indexer.slots = 40
indexer.slots__pro = 80
# specifies if yacy should set it's own referer if no referer URL
# was set by the client.
useYacyReferer = true
useYacyReferer__pro = false
# allow only 443(https-port) for https-proxy?
# if you want to tunnel other protokols, set to false
