From e9e0d63897516ce8aa3e13673cfccd3841016d27 Mon Sep 17 00:00:00 2001
From: reger
Date: Thu, 27 Dec 2012 10:01:10 +0100
Subject: [PATCH] Add config option to show HostBrowser link in search result

- ConfigPortal: added checkbox Host Browser
- yacy.init: added search.result.show.hostbrowser as default = on (true)
- fix HostBrowser: broken link to protected WebStructurePicture for public user

---
 defaults/yacy.init         | 2247 ++++++++++++++++++------------------
 htroot/ConfigPortal.html   |  455 ++++----
 htroot/ConfigPortal.java   |  485 ++++----
 htroot/HostBrowser.html    |    4 +-
 htroot/HostBrowser.java    |    1 +
 htroot/yacysearchitem.html |    2 +-
 htroot/yacysearchitem.java |    2 +
 7 files changed, 1603 insertions(+), 1593 deletions(-)

diff --git a/defaults/yacy.init b/defaults/yacy.init
index c371e9c48..9e7b8318d 100644
--- a/defaults/yacy.init
+++ b/defaults/yacy.init
@@ -1,1123 +1,1124 @@
-###
-### YaCy Init File
-###
-# These properties will be loaded upon installation.
-# They are used only once for set-up.
-# If you make changes to this file and want these to make any effect,
-# you must delete the yacy.conf file in DATA/SETTINGS
-
-# ----------------------------------------------------------------------------
-# the HTTP service configurations
-
-# port number where the server should bind to
-port = 8090
-
-# prefix for new default peer names
-peernameprefix=_anon
-
-# use UPnP [true/false]
-upnp.enabled = true
-# remote host on UPnP device (for more than one connection)
-upnp.remoteHost =
-
-#sometimes you may want yacy to bind to another port, than the one reachable from outside.
-#then set bindPort to the port yacy should bind on, and port to the port, visible from outside
-#to run yacy on port 8090, reachable from port 80, set bindPort=8090, port=80 and use
-#iptables -t nat -A PREROUTING -p tcp -s 192.168.24.0/16 --dport 80 -j DNAT --to 192.168.24.1:8090
-#(of course you need to customize the ips)
-bindPort =
-
-# SSL support:
-#
-# For a German manual see http://yacy-websuche.de/wiki/index.php/De:Interface%C3%9CberHTTPS
-#
-# English speaking user read below:
-#
-# With this you can access your peer using https://localhost:8090
-#
-# There are two possibilities to specify which certificate should
-# be used by YaCy.
-#
-# 1) Create a new certificate:
-#
-# *) For testing purposes, you can create a keystore with a self-signed certificate,
-# using the following command:
-# C:\> keytool -keystore mySrvKeystore -genkey -keyalg RSA -alias mycert
-#
-# *) Then configure the keyStoreXXXX properties accordingly, e.g.
-# keyStore = c:/yacy/DATA/SETTINGS/mySrvKeystore
-# keyStorePassword = mypwd
-#
-# 2) Import an existing certificate:
-#
-# Alternatively you can import an existing certificate in pkcs12 format into
-# the keystore.
-#
-# This can be done by setting the pkcs12XXX properties accordingly, e.g.
-# pkcs12ImportFile = c:/temp/keystore.pkcs12
-# pkcs12ImportPwd = test
-#
-# If the property keyStore is not specified, then a new keystore file
-# DATA/SETTINGS/myPeerKeystore will be created.
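
The pkcs12 import described above amounts to copying the key entries of a PKCS12 file into a keystore saved under the configured keyStore path. A minimal standalone sketch in plain JDK code, reusing the example values from the comment block (c:/temp/keystore.pkcs12, password "test", DATA/SETTINGS/myPeerKeystore); it illustrates the mechanism only and is not YaCy's actual import routine:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.security.Key;
    import java.security.KeyStore;
    import java.security.cert.Certificate;
    import java.util.Enumeration;

    public class Pkcs12Import {
        public static void main(String[] args) throws Exception {
            char[] pw = "test".toCharArray();                  // pkcs12ImportPwd from the example above
            KeyStore p12 = KeyStore.getInstance("PKCS12");
            try (FileInputStream in = new FileInputStream("c:/temp/keystore.pkcs12")) {
                p12.load(in, pw);                              // pkcs12ImportFile
            }
            KeyStore jks = KeyStore.getInstance("JKS");
            jks.load(null, null);                              // start with an empty target keystore
            Enumeration<String> aliases = p12.aliases();
            while (aliases.hasMoreElements()) {
                String alias = aliases.nextElement();
                if (p12.isKeyEntry(alias)) {
                    Key key = p12.getKey(alias, pw);
                    Certificate[] chain = p12.getCertificateChain(alias);
                    jks.setKeyEntry(alias, key, pw, chain);    // copy private key and certificate chain
                }
            }
            try (FileOutputStream out = new FileOutputStream("DATA/SETTINGS/myPeerKeystore")) {
                jks.store(out, pw);                            // protect with keyStorePassword
            }
        }
    }
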
- -keyStore = -keyStorePassword = -pkcs12ImportFile = -pkcs12ImportPwd = - -# property that collects the names of all servlets that had been used so far -# that is used to track if the user has already done some configuration steps -# if the used missed configuration steps that should be done, then a help system -# is possible which leads the used based on the list of servlets that had been used -# the list distinguishes called and submitted servlets -server.servlets.submitted = - -# server tracking: maximum time a track entry is hold in the internal cache -# value is in milliseconds, default is one hour -server.maxTrackingTime = 3600000 - -# maximum number of tracks per host -server.maxTrackingCount = 1000 - -# maximum number of hosts that are tracked -server.maxTrackingHostCount = 100 - -# maximum file sizes: since some users experience problems with too large files -# the file size of database files can be limited. Larger files can be used to get a -# better IO performance and to use less RAM; however, if the size must be limited -# because of limitations of the file system, the maximum size can be set here -filesize.max.win = 2147483647 -filesize.max.other = 8589934591 - -# Network Definition -# There can be separate YaCy networks, and managed sub-groups of the general network. -# The essentials of the network definition are attached in separate property files. -# The property here can also be a url where the definition can be loaded. -# In case of privately managed networks, this configuration must be changed BEFORE it is released -# to the members of the separated network peers. -network.unit.definition = defaults/yacy.network.freeworld.unit -#network.unit.definition = defaults/yacy.network.intranet.unit - -# distinguish intranet/internet IPs: -# if this setting is set to true, then only URL-Hashes with 'intranet'-Flag is created, even if the -# url is in the internet. This can be done to enhance the crawling speed dramatically since a DNS-lookup -# to check if a host is in the internet oder an intranet can be omited. -# This option is only valid if the network.unit.domain property is set to 'any' -network.unit.domain.nocheck = false - -# in addition to non-dht networks a client may have its own agent name -# this option is only used if the value is non-empty and network.unit.dht = false -# that means it is not usable in YaCy p2p-configurations, only in private portal configurations -network.unit.tenant.agent = - -# Update process properties -# The update server location is given in the network.unit.definition, -# but the settings for update processing and cycles are individual. -# the update process can be either 'manual' (no automatic lookup for new versions), -# 'guided' (automatic lookup, but user is asked before update is performed', -# or 'auto' (whenever an update is available, the update is loaded and installed) -update.process = manual -# the cycle value applies only if the process is automatic or guided. The value means hours. -# There is currently a fixed minimum number of hours of 24 hours for updates -update.cycle = 168 -# a version number blacklist can restrict automatic or guided updates to a specific -# range of version numbers. 
The restriction is done with a blacklist (standard regexpr) -# It is recommended to set this list to low developer version numbers -update.blacklist = ...[123] -# a update can also restricted with a concept property, which can decide if an -# update is only valid if it either is a main release or any release including new development releases -# Valid keywords are 'main' and 'any' -update.concept = any -# the following values are set automatically: -# the lookup time when the last time a lookup to the network update server(s) where done -update.time.lookup = 0 -# the download time when the last time a release was downloaded -update.time.download = 0 -# the deploy time when the last update was done; milliseconds since epoch -update.time.deploy = 0 -# delete old downloaded files after this amount of days to free disk space -# the latest release is always kept -update.deleteOld = 30 -# only install sign files -update.onlySignedFiles = 1 - -# restart-option -# a peer can be re-started periodically -# restart.process can be either 'off' (no automatic restart) or 'time' (time- rule-based, see below) -restart.process = off -# the restart.cycle is the number of hours that must pass before a restart is done -restart.cycle = 20 -# the restart.hour is a pattern that must match with the hour string (two-digit, 24h) -# when the restart should be performed -restart.hour = 03 -# the following values are set automatically -restart.time = 0 - -# clusters within a network: -# every network can have an unlimited number of clusters. Clusters may be also completely -# sealed and have no connection to other peers. When a cluster does not use the -# p2p protocol and the bootstraping mechanism to contact other peers, we call them -# Robinson peers. They can appear in different 'visibilities': -# - privatepeer: no connection and no data exchange to any other peer -# - privatecluster: connections only to self-defined addresses (other peers in same mode) -# - publiccluster: like privatecluster, but visible and searcheable by public p2p nodes -# - publicpeer: a single peer without cluster connection, but visible for p2p nodes -# all public robinson peers should use a peer tag string to be searcheable if in the -# search request these tags appear -cluster.mode=publicpeer -cluster.peers.yacydomain=localpeer.yacy -cluster.peers.ipport=localhost:8090 - -# bootstrapLoadTimeout -# this is the time-out for loading of the seedlist files during bootstraping -# the time should not be too long, since loading of the seedlist is not parallelized -# and a not successful loading of a seed file may prevent a peer from becoming -# a (at least) junior status. 
If the time-out is too short, there is the danger -# that the peer stays in virgin mode -bootstrapLoadTimeout = 6000 - -# time-out of client control socket in milliseconds -# since this applies only to the client-proxy connection, -# it can be rather short -# milliseconds -clientTimeout = 10000 - -# maximal number of httpd sessions -# a client may open several connections at once, and the httpdMaxBusySessions value sets -# a limit on the number of concurrent connections -httpdMaxBusySessions = 200 - -# default root path for the file server -# may be overridden by the htdocs parameter -# users shall be encouraged to use the htdocs path for individual content, -# not this path defined here -htRootPath = htroot - -# the htroot path -# root path for the httpd file server -htDefaultPath=htroot - -# individual htroot folder -# every user may publicize her/his own web pages -# these pages shall be placed in the path defined here -# the htdocs path shares its content with the htroot path -htDocsPath = DATA/HTDOCS - -# alternative path for the repository path of the web server: the URL -# http://localhost:8090/repository -# points to DATA/HTDOCS/repository, but can be altered with this repository path -# hint: the repository path is the default path for intranet indexing. The easiest ways -# to do a indexing of the local storage system is to set a path here for the repository -# that points to the root path of the files that shall be indexed -repositoryPath=DATA/HTDOCS/repository - -# the default files (typically index.html), if no file name is given -# The complete path to this file is created by combination with the rootPath -# you can set a list of defaults, separated by comma -# the first one is preferred -defaultFiles = index.html,index.htm,default.html,search.html,console.html,control.html,welcome.html,wiki.html,forum.html,blog.html,email.html,content.html,monitor.html,share.html,dir.html,readme.txt - -# locale-options: YaCy supports localization. 
-# Web pages for special languages are located in the htLocalePath -# The htLocaleLang defines a list of language options as / -# the must exist as sub-path to htLocalePath -# the htLocaleSelection selects from the given locales, value=one-of- -locale.source=locales -locale.translated_html=DATA/LOCALE/htroot -locale.language=default - -# virtual host for httpdFileServlet access -# for example http:/// shall access the file servlet and -# return the defaultFile at rootPath -# either way, http:/// denotes the same as http://localhost:/ -# for the preconfigured value 'localpeer', the URL is: -# http://localpeer/ -fileHost = localpeer - -# specify the path to the MIME matching file table -mimeTable = defaults/httpd.mime - -# specify the path to the sessionid name file -sessionidNamesFile = defaults/sessionid.names - -# a path to the file cache, used for the internal proxy and as crawl buffer -# This will be used if the server is addressed as a proxy -proxyCache = DATA/HTCACHE - -# the maximum disc cache size for files in Cache in megabytes -# default: 4 Gigabyte -proxyCacheSize = 4096 - -# a path to the surrogate input directory -surrogates.in = DATA/SURROGATES/in - -# a path to the surrogate output directory -surrogates.out = DATA/SURROGATES/out - -# a path to the dictionaries directory -# this directory also contains subdirectories for input sources, the did-you-mean function and other -dictionaries = DATA/DICTIONARIES - -# storage place for new releases -releases = DATA/RELEASE - -# time limits for the crawler: -# these times (milliseconds) are the shortest times for an access of the crawler to the same domain -# the crawler may read files faster than that, but never from the same domain faster than these time intervals -# a delta of 500 milliseconds means that no more than two files are taken from the same server -# there is a hard-coded limit which prevents that the used time is shorter that these default times -# the time-limits are distinguished for local and global crawls: there is no limit for an intranet-crawl. -minimumLocalDelta = 0 -minimumGlobalDelta = 500 - -# the following mime-types are a blacklist for indexing: -# parser.mime.deny: specifies mime-types that shall not be indexed -parser.mime.deny= -parser.extensions.deny= -parser.enableAudioTags=false - -# Promotion Strings -# These strings appear in the Web Mask of the YACY search client -# Set these Strings to cusomize your peer and give any message to -# other peer users -promoteSearchPageGreeting = P2P Web Search -# if the following property is set to true, the network name is used as greeting -promoteSearchPageGreeting.useNetworkName = false -# the following attributes can be used to define a custom image and home page on the search page -promoteSearchPageGreeting.homepage = http://yacy.net -promoteSearchPageGreeting.largeImage = /env/grafics/YaCyLogo_120ppi.png -promoteSearchPageGreeting.smallImage = /env/grafics/YaCyLogo_60ppi.png - -# the path to the public reverse word index for text files (web pages) -# the primary path is relative to the data root, the secondary path is an absolute path -# when the secondary path should be equal to the primary, it must be declared empty -indexPrimaryPath=DATA/INDEX - -# the path to the LISTS files. Most lists are used to filter web content -listsPath=DATA/LISTS - -# path to additional databases, like messages, blog data and bookmarks -workPath=DATA/WORK - -# the path to the SKINS files. 
-skinPath=DATA/SKINS - -# the yellow-list; URL's elements -# (the core of an URL; like 'yahoo' in 'de.yahoo.com') -# appearing in this list will not get a manipulated user agent string -proxyYellowList=yacy.yellow - -# the black-list; URLs appearing in this list will not be loaded; -# instead always a 404 is returned -# all these files will be placed in the listsPath -BlackLists.Shared=url.default.black -BlackLists.DefaultList=url.default.black - -#these are not needed as default. they just keep the values from beeing deleted ... -proxy.BlackLists=url.default.black -crawler.BlackLists=url.default.black -dht.BlackLists=url.default.black -search.BlackLists=url.default.black -surftips.BlackLists=url.default.black -news.BlackLists=url.default.black - -proxyCookieBlackList=cookie.default.black -proxyCookieWhiteList=cookie.default.black - -# the blue-list; -# no search result is locally presented that has any word of the bluelist -# in the search words, the URL or the URL's description -plasmaBlueList=yacy.blue - -# this proxy may in turn again access another proxy -# if you wish to do that, specify it here -# if you want to switch on the proxy use, set remoteProxyUse=true -# remoteProxyNoProxy is a no-proxy pattern list for the remote proxy -remoteProxyUse=false -remoteProxyUse4Yacy=true -remoteProxyUse4SSL=true - -remoteProxyHost=192.168.2.2 -remoteProxyPort=4239 -remoteProxyUser= -remoteProxyPwd= - -remoteProxyNoProxy=10\..*,127\..*,172\.(1[6-9]|2[0-9]|3[0-1])\..*,169\.254\..*,192\.168\..*,localhost,0:0:0:0:0:0:0:1 - -# the proxy may filter the content of transferred web pages -# the bluelist removes specific keywords from web pages -proxyBlueList=yacy.blue - -# security settigns -# we provide proxy and server security through a 2-stage security gate: -# 1st stage: firewall-like access control trough ip filter for clients -# 2nd stage: password settings for proxy, server and server administrators -# by default, these settings are weak to simplify set-up and testing -# every user/adiministrator shall be encouraged to change these settings -# your can change them also online during run-time on -# http://localhost:8090/ - -# proxyClient: client-ip's that may connect the proxy for proxy service -# if several ip's are allowed then they must be separated by a ',' -# regular expressions may be used -#proxyClient=192.168.0.4 -proxyClient=localhost,127\.0\.0\.1,192\.168\..*,10\..*,0:0:0:0:0:0:0:1.* - -# YaCyHop: allow public usage of proxy for yacy-protocol -# this enables usage of the internal http proxy for everyone, -# if the file path starts with /yacy/ -# This is used to enable anonymization of yacy protocol requests -# Instead of asking a remote peer directly, a peer in between is asked -# to prevent that the asked peer knows which peer asks. -YaCyHop=true - -# serverClient: client-ip's that may connect to the web server, -# thus are allowed to use the search service -# if you set this to another value, search requst from others -# are blocked, but you will also be blocked from using others -# search services. -serverClient=* - -# use_proxyAccounts: set to true to restrict proxy-access to some identified users. -#use User_p.html to create some Users. -use_proxyAccounts=true - -# adminAccount: a user:password - pair for administration of -# settings through the web interface -# should be set to a secret. 
By default it is without a password -# but you are encouraged to set it to another value on the page -# http://localhost:8090/ConfigAccounts_p.html -#adminAccount=admin:mysecretpassword -adminAccount= -adminAccountBase64MD5= - -# special access handling for users from localhost: -# access from localhost may be granted with administration authority -# if this flag is set. It is set to true by default to make usage of YaCy easy -# if you use YaCy on a headless server, you should set this to false -# or configure this on http://localhost:8090/ConfigAccounts_p.html -# during the first 10 minutes of operation of YaCy; -# if the admin account password is still empty after 10 minutes a random -# password is generated an access is then ONLY from localhost, which will cause -# inaccessibility for installations on headless servers. -adminAccountForLocalhost=true - -# if you are running a principal peer, you must update the following variables -# The upload method that should be used to upload the seed-list file to -# a public accessible webserver where it can be loaded by other peers. -# -# You can set the seedUploadMethod-Property to -# - None -# - Ftp -# - File -# - Scp (only if you have installed the optional addon) -# -seedUploadMethod=none - -# This is the most common method to upload the seed-list -# -# This is an ftp account with all relevant information. -# The update is only made if there had been changes in between. -seedFTPServer= -seedFTPAccount= -seedFTPPassword= -seedFTPPath= - -# alternatively to an FTP account, a peer can also become a principal peer -# if the seed-list can be generated as a file and that file is also accessible from -# the internet. In this case, omit any ftp settings and set this path here. -# if this path stays empty, an ftp account is considered -# however, you must always set a seedURL because it is used to check if the -# file is actually accessible from the internet -seedFilePath= - -# Settings needed to upload the seed-list file via scp -# -# Please note that this upload method can only be used if you have installed -# this optional upload method. -seedScpServer= -seedScpServerPort= -seedScpAccount= -seedScpPassword= -seedScpPath= - -# every peer periodically scans for other peers. you can set the time -# of the period here (minutes) -peerCycle=2 - -# Debug mode for YACY network: this will trigger that also local ip's are -# accepted as peer addresses -yacyDebugMode=false - -#staticIP if you have a static IP, you can use this setting -staticIP= - -# each time YaCy starts up, it can trigger the local browser to show the -# status page. This is active by default, to make it easier for first-time -# users to understand what this application does. You can disable browser -# pop-up here or set a different start page, like the search page -browserPopUpTrigger=true -browserPopUpPage=index.html - -# a forward page can be given for the index.html page -# when a user accesses the index.html page, he/she is forwarded to the page -# as given by indexForward. This is by default not defined which means 'no forward' -indexForward = - -# defines if the YaCy icon appears in the system tray on supported platforms -tray.icon.enabled=true -tray.icon.force=false -tray.icon.label=YaCy -tray.menu.enabled=true - -# index sharing attributes: by default, sharing is on. 
-# If you want to use YaCy only for local indexing (robinson mode), -# you may switch this off -allowDistributeIndex=true -allowDistributeIndexWhileCrawling=false -allowDistributeIndexWhileIndexing=true -allowReceiveIndex=true -allowUnlimitedReceiveIndexFrom= -indexReceiveBlockBlacklist=true - -# the frequency is the number of links per minute, that the peer allowes -# _every_ other peer to send to this peer -defaultWordReceiveFrequency=100 -defaultLinkReceiveFrequency=30 -# the default may be overridden for each peer individually, these -# settings are only available through the online interface - -# prefetch parameters -# the prefetch depth assigns a specific depth to the prefetch mechanism -# prefetch of 0 means no prefetch; a prefetch of 1 means to prefetch all -# embedded URLs, but since embedded image links are loaded by the browser -# this means that only embedded anchors are prefetched additionally -# a prefetch of 2 would result in loading of all images and anchor pages -# of all embedded anchors. Be careful with this value, since even a prefetch -# of 2 would result in hundreds of prefetched URLs for each single proxy fill. -proxyPrefetchDepth=0 -proxyStoreHTCache=true -proxyIndexingRemote=false -proxyIndexingLocalText=true -proxyIndexingLocalMedia=true - -# proxy usage only for .yacy-Domains for autoconfig -proxyYacyOnly=false - -# enable proxy via url (/proxy.html?url=http://yacy.net) -proxyURL=false -proxyURL.access=127.0.0.1,0:0:0:0:0:0:0:1 -# which urls to rewrite to /proxy.html?url=x (values: all, domainlist) -proxyURL.rewriteURLs=domainlist -proxyURL.useforresults=false - -# From the 'IndexCreate' menu point you can also define a crawling start point. -# The crawling works the same way as the prefetch, but it is possible to -# assign a different crawling depth. -# Be careful with this number. Consider a branching factor of average 20; -# A prefetch-depth of 8 would index 25.600.000.000 pages, maybe the whole WWW. -crawlingDepth=3 -crawlingDirectDocByURL=true -crawlingIfOlder=-1 -crawlingDomFilterDepth=-1 -crawlingDomMaxPages=-1 -indexText=true -indexMedia=true - -# Filter for crawling; may be used to restrict a crawl to a specific domain -# URLs are only indexed and further crawled if they match this filter -crawlingFilter=.* -crawlingQ=false -storeHTCache=true -storeTXCache=true - -# peers may initiate remote crawling tasks. -# every peer may allow or disallow to be used as crawling-peer; -# you can also set a maximum crawl depth that can be requested or accepted -# order=parameters for requester; response=parameters for responder -# these values apply only for senior-senior - communication -# The delay value is number of seconds bewteen two separate orders -# crawlOrder: default value for remote crawl starts -# crawlResponse: set to true if a peer should retrieve remote crawl urls from other peers -crawlOrder=true -crawlOrderDepth=0 -crawlResponse=false -crawlResponseDepth=0 - -# indexing-exclusion - rules -# There rules are important to reduce the number of words that are indexed -# We distinguish three different sets of stop-words: -# static - excludes all words given in the file yacy.stopwords from indexing, -# dynamic - excludes all words from indexing which are listed by statistic rules, -# parental - excludes all words from indexing which had been indexed in the parent web page. 
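
The branching-factor warning in the crawlingDepth comment above is plain exponential growth: with an average of 20 links per page, a crawl of depth d reaches roughly 20^d pages. A short standalone check of the quoted figure (an illustration, not part of the patch):

    public class CrawlDepthEstimate {
        public static void main(String[] args) {
            long pages = 1L;
            for (int depth = 1; depth <= 8; depth++) {
                pages *= 20L;             // average branching factor of 20 links per page
            }
            System.out.println(pages);    // 25600000000, the 25.600.000.000 pages quoted above
        }
    }
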
-xsstopw=true -xdstopw=true -xpstopw=true - -# Topwords filtering -# If set to true, all stopwords (stopwords.yacy) are filtered from the topwords -# Change to false if requesting hits from peers with modified stopwords-file and using the unchanged client-version -filterOutStopwordsFromTopwords=true - -# crawling steering: must-match/must-not-match -crawlingIPMustMatch=.* -crawlingIPMustNotMatch= -# the default country codes are all codes for countries in Europe -crawlingCountryMustMatch=AD,AL,AT,BA,BE,BG,BY,CH,CY,CZ,DE,DK,EE,ES,FI,FO,FR,GG,GI,GR,HR,HU,IE,IM,IS,IT,JE,LI,LT,LU,LV,MC,MD,MK,MT,NL,NO,PL,PT,RO,RU,SE,SI,SJ,SK,SM,TR,UA,UK,VA,YU - -# collections for index data separation -# these collections can either be used to produce search tenants. -# The collection is used in the site-parameter in the GSA interface. -# Collections are assigned during crawl-time and defined in the crawl start. -# The YaCyScheme field collection_sxt must be switched on to use this field. -collection=user - -# performance-settings -# delay-times for permanent loops (milliseconds) -# the idlesleep is the pause that an proces sleeps if the last call to the -# process job was without execution of anything; -# the busysleep is the pause after a full job execution -# the prereq-value is a memory pre-requisite: that much bytes must -# be available/free in the heap; othervise the loop is not executed -# and another idlesleep is performed -20_dhtdistribution_idlesleep=30000 -20_dhtdistribution_busysleep=15000 -20_dhtdistribution_memprereq=12582912 -30_peerping_idlesleep=30000 -30_peerping_busysleep=30000 -30_peerping_memprereq=2097152 -40_peerseedcycle_idlesleep=1800000 -40_peerseedcycle_busysleep=1200000 -40_peerseedcycle_memprereq=4194304 -50_localcrawl_idlesleep=2000 -50_localcrawl_busysleep=20 -50_localcrawl_memprereq=12582912 -50_localcrawl_isPaused=false -60_remotecrawlloader_idlesleep=4000 -60_remotecrawlloader_busysleep=800 -60_remotecrawlloader_memprereq=12582912 -60_remotecrawlloader_isPaused=false -62_remotetriggeredcrawl_idlesleep=2000 -62_remotetriggeredcrawl_busysleep=200 -62_remotetriggeredcrawl_memprereq=12582912 -62_remotetriggeredcrawl_isPaused=false -70_surrogates_idlesleep=10000 -70_surrogates_busysleep=0 -70_surrogates_memprereq=12582912 -90_cleanup_idlesleep=300000 -90_cleanup_busysleep=300000 -90_cleanup_memprereq=0 - -# additional attributes: -# performanceIO is a percent-value. a value of 10 means, that 10% of the busysleep time -# is used to flush the RAM cache, which is the major part of the IO in YaCy -performanceProfile=defaults/yacy.init -performanceSpeed=100 -performanceIO=10 - -# cleanup-process: -# properties for tasks that are performed during cleanup -cleanup.deletionProcessedNews = true -cleanup.deletionPublishedNews = true -cleanup.failedSearchURLtimeout = 86400000 - - -# default memory settings for startup of yacy -# is valid in unix/shell and windows environments but -# not for first startup of YaCy - -# -Xmx and -Xms maximum/init Java heap size -# if a high performance for large search indexes is wanted, then setting the values to equal number is recommended -# if YaCy shall be nice in not-only-yacy environments, then the Xms value may be lower -javastart_Xmx=Xmx600m -javastart_Xms=Xms90m - -# YaCy is able to use RAM copies of database tables. This needs a lot of RAM. 
-# To switch on copying of file tables int RAM, there must be enough memory -# The memory that is available at startup time is used to switch the feature on -# The tableCachingLimit is the amount of free RAM at startup time to switch on the feature -tableCachingLimit=419430400 - -# some java versions may be limited to a specific array size -# of 134217727 entries. To prevent that tables of that size are generated, -# set this property to false -# If you want to have better performance and switch ramcopy on, try also to -# set this property to true -# this value is automatically set to true, if more than two gigabyte is available -exceed134217727=false - -# priority of the yacy-process -# is valid in unix/shell and windows environments but -# not for first startup of YaCy -# UNIX: corresponds to the nice-level -# WIN: -20=realtime;-15=high;-10=above;0=normal;10=below;20=low -javastart_priority=10 - -# performance properties for the word index cache -# wordCacheMaxLow/High is the number of word indexes that shall be held in the -# ram cache during indexing. If you want to increase indexing speed, increase this -# value i.e. up to one million, but increase also the memory limit to a minimum of 2GB -wordCacheMaxCount = 50000 - -# Specifies if yacy can be used as transparent http proxy. -# -# Please note that you also have to reconfigure your firewall -# before you can use yacy as transparent proxy. On linux this -# can be done like this: -# iptables -t nat -A PREROUTING -p tcp -s 192.168.0.0/16 \ -# --dport 80 -j DNAT --to 192.168.0.1:8090 -# -# With this iptables filter listed above all http traffic that -# comes from your private network (in this case 192.168.0.0) -# and goes to any webserver listening on port 80 will be forwarded -# by the firewall to yacy running on port 8090 (192.168.0.1:8090) -isTransparentProxy=false - -# Specifies if yacy should use the http connection keep-alive feature -connectionKeepAliveSupport=true - -# Specifies the timeout the proxy sould use -proxy.clientTimeout = 30000 - -# Specifies if the proxy should send the via header according to RFC -proxy.sendViaHeader=true - -# Specifies if the proxy should send the X-Forwarded-For header -proxy.sendXForwardedForHeader=true - -# Enable cookie monitoring -proxy.monitorCookies=false - -# msgForwarding: Specifies if yacy should forward received messages via -# email to the configured email address -msgForwardingEnabled=false -msgForwardingCmd=/usr/sbin/sendmail -msgForwardingTo=root@localhost - -#crawlPause: delay time after specific functions before crawling is resumed -crawlPause.proxy=10 -crawlPause.localsearch=50 -crawlPause.remotesearch=10 - -# Some configuration values for the crawler -crawler.clientTimeout=30000 - -# http crawler specific settings; size in bytes -crawler.http.accept=text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 -crawler.http.acceptEncoding=gzip -crawler.http.acceptLanguage=en-us,en;q=0.5 -crawler.http.acceptCharset=ISO-8859-1,utf-8;q=0.7,*;q=0.7 -crawler.http.maxFileSize=10485760 -crawler.http.FollowRedirects=true -crawler.http.RecordRedirects=false - -# ftp crawler specific settings; size in bytes -crawler.ftp.maxFileSize=10485760 - -# smb crawler specific settings: maximum size -crawler.smb.maxFileSize=100000000 - -# smb crawler specific settings: maximum size -crawler.file.maxFileSize=100000000 - -# maximum number of crawler threads -crawler.MaxActiveThreads = 200 - -# maximum size of indexing queue -indexer.slots = 100 - -# maximum size of stacker queue -stacker.slots = 2000 
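
The settings above follow the usual key = value and '#'-comment conventions, so the file can be read like ordinary Java properties. A standalone sketch, assuming the file parses as standard java.util.Properties (YaCy itself loads it through its own configuration classes); the keys and defaults are the ones documented in this section:

    import java.io.FileInputStream;
    import java.util.Properties;

    public class InitDefaults {
        public static void main(String[] args) throws Exception {
            Properties init = new Properties();
            try (FileInputStream in = new FileInputStream("defaults/yacy.init")) {
                init.load(in);   // '#' lines are comments, whitespace around '=' is ignored
            }
            // numeric settings from the crawler section above, with their documented defaults
            int maxThreads = Integer.parseInt(init.getProperty("crawler.MaxActiveThreads", "200").trim());
            long maxHttpFile = Long.parseLong(init.getProperty("crawler.http.maxFileSize", "10485760").trim());
            int stackerSlots = Integer.parseInt(init.getProperty("stacker.slots", "2000").trim());
            System.out.println(maxThreads + " crawler threads, " + maxHttpFile
                    + " bytes max per http file, " + stackerSlots + " stacker slots");
        }
    }
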
- -# search options: show advanced options on main search page -search.options = true - -# search domains. If set to false then that search is not available -search.text = true -search.image = true -search.audio = true -search.video = true -search.app = false - -# number of search results displayed by default -search.items = 10 - -# target for search results; this is the href target attribute inside every search result link -# possible values: -# "_blank" (new window), "_self" (same window), "_parent" (the parent frame of a frameset), -# "_top" (top of all frames), "searchresult" (a default custom page name for search results) -# a special pattern can be given for exceptions to the default target according to urls -search.target = _self -search.target.special = _self -search.target.special.pattern = - -# search result lines may show additional information for each search hit -# these information pieces may be switched on or off -search.result.show.date = true -search.result.show.size = false -search.result.show.metadata = false -search.result.show.parser = false -search.result.show.pictures = false -search.result.show.cache = true -search.result.show.proxy = false -search.result.show.tags = false - -# search navigators: comma-separated list of default values for search navigation. -# can be temporary different if search string is given with differen navigation values -# assigning no value(s) means that no navigation is shown -search.navigation=hosts,authors,namespace,topics,filetype,protocol - -# search result verification and snippet fetch caching rules -# each search result can be verified byloading the link from the web -# this can be enhanced using a cache. In some cases it may be appropriate -# to not verify the link at all and do not compute a snippet -# the possible cases are: -# nocache: no use of web cache, load all snippets online -# iffresh: use the cache if the cache exists and is fresh otherwise load online -# ifexist: use the cache if the cache exist or load online -# cacheonly: never go online, use all content from cache. If no cache entry exist, -# consider content nevertheless as available and show result without snippet -# false: no link verification and not snippet generation: - all search results are valid without verification -search.verify = ifexist - -search.excludehosts= -search.excludehosth= - -# in case that a link verification fails then the corresponding index reference can be -# deleted to clean up the index. If this property is set then failed index verification in -# the cases of nocache, iffresh and ifexist causes an index deletion -search.verify.delete = true - -# remote search details -remotesearch.maxcount = 20 -remotesearch.maxtime = 1000 - -# specifies if yacy should set it's own referer if no referer URL -# was set by the client. -useYacyReferer = false - -# allow only 443(https-port) for https-proxy? 
-# if you want to tunnel other protocols, set to false -secureHttps = true - -# specifies if the httpdFileHandler should cache -# the template-files from the htroot directory -enableTemplateCache = true - -# specifies if the http post body should be transfered -# using content-encoding gzip during index transfer -# a) indexDistribution: which is done periodically if you have enabled -# Index Distribution via IndexControl_p.html -# b) indexTransfer: which can be used to transfer the whole index of a peer -# this can be started via IndexTransfer_p.html -# c) indexControl: which can be triggered manually via IndexControl_p.html to -# transfer a chosen subset of the peer index -indexDistribution.gzipBody = true -indexTransfer.gzipBody = true -indexControl.gzipBody = true - -# defining timeouts for index- transfer/distribution/control -indexControl.timeout = 60000 -indexDistribution.timeout = 60000 -indexTransfer.timeout = 120000 - -# defining max. allowed amount of open files during index- transfer/distribution -indexDistribution.maxOpenFiles = 800 -indexTransfer.maxOpenFiles = 800 - -# sizes for index distribution -indexDistribution.minChunkSize = 10 -indexDistribution.maxChunkSize = 1000 -indexDistribution.startChunkSize = 200 -indexDistribution.maxChunkFails = 1 - -# limit of references per term & blob to the younges of this value -# a value of <= 0 disables this feature (no limit) -# a value of e.g. 100000 can improve stability and reduce load while searching very popular words -index.maxReferences = 0 - -# Search sequence settings -# collection: -# time = time to get a RWI out of RAM cache, assortments and WORDS files -# count = maximum number of RWI-entries that shall be collected -# -# join: -# time = time to perform the join between all collected RWIs -# count = maximum number of entries that shall be joined -# -# presort: -# time = time to do a sort of the joined URL-records -# count = maximum number of entries that shall be pre-sorted -# -# urlfetch: -# time = time to fetch the real URLs from the LURL database -# count = maximum number of urls that shall be fetched -# -# postsort: -# time = time for final sort of URLs -# count = maximum number oof URLs that shall be retrieved during sort -# -# filter: -# time = time to filter out unwanted urls (like redundant urls) -# count = maximum number of urls that shall be filtered -# -# snippetfetch: -# time = time to fetch snippets for selected URLs -# count = maximum number of snipptes to be fetched -# -# all values are percent -# time-percent is the percent of total search time -# count-percent is the percent of total wanted urls in result -# we distinguish local and remote search times -searchProcessLocalTime_c = 44 -searchProcessLocalCount_c = 10000000 -searchProcessLocalTime_j = 8 -searchProcessLocalCount_j = 1000000 -searchProcessLocalTime_r = 8 -searchProcessLocalCount_r =100000 -searchProcessLocalTime_u = 20 -searchProcessLocalCount_u = 10000 -searchProcessLocalTime_o = 10 -searchProcessLocalCount_o = 100 -searchProcessLocalTime_f = 5 -searchProcessLocalCount_f = 100 -searchProcessLocalTime_s = 5 -searchProcessLocalCount_s = 30 - -searchProcessRemoteTime_c = 44 -searchProcessRemoteCount_c = 1000000 -searchProcessRemoteTime_j = 8 -searchProcessRemoteCount_j = 1000000 -searchProcessRemoteTime_r = 8 -searchProcessRemoteCount_r = 1000 -searchProcessRemoteTime_u = 20 -searchProcessRemoteCount_u = 1000 -searchProcessRemoteTime_o = 10 -searchProcessRemoteCount_o = 1000 -searchProcessRemoteTime_f = 5 -searchProcessRemoteCount_f = 100 
-searchProcessRemoteTime_s = 5 -searchProcessRemoteCount_s = 10 - -# timeouts for snippet fetching in ms -# timeout_text is for text-snippets, timeout_media for media, e.g. images -timeout_text = 10000 -timeout_media = 15000 - -# a list of domain name patterns that should not be cached by the httpc dns cache -httpc.nameCacheNoCachingPatterns = .*.ath.cx,.*.blogdns.*,.*.boldlygoingnowhere.org,.*.dnsalias.*,.*.dnsdojo.*,.*.dvrdns.org,.*.dyn-o-saur.com,.*.dynalias.*,.*.dyndns.*,.*.ftpaccess.cc,.*.game-host.org,.*.game-server.cc,.*.getmyip.com,.*.gotdns.*,.*.ham-radio-op.net,.*.hobby-site.com,.*.homedns.org,.*.homeftp.*,.*.homeip.net,.*.homelinux.*,.*.homeunix.*,.*.is-a-chef.*,.*.is-a-geek.*,.*.kicks-ass.*,.*.merseine.nu,.*.mine.nu,.*.myphotos.cc,.*.podzone.*,.*.scrapping.cc,.*.selfip.*,.*.servebbs.*,.*.serveftp.*,.*.servegame.org,.*.shacknet.nu - -#externalRedirectors -#squid Redirector compatible -externalRedirector= - -svnRevision=0 - -currentSkin=pdblue - -# flag to show if pages shall be usable for non-admin users -# this can be applied to the Surftips.html and yacysearch.html page -publicSurftips = true -publicSearchpage = true - -# flag to show if the top navigation bar shall be shown to all users -# if this is disabled, then the user must navigate manually from the search page -# to /Status.html to get the main memu bar back -publicTopmenu = true - -# flag if a small link to the administration sites on the right bottom of the index -# page should be shown to all users. -# you might want to enabled this, if publicTopmenu is disabled -publicAdministratorPi = false - -# Wiki access rights -# the built-in wiki system allows by default only that the administrator is allowed to make changes -# this can be changed. There are three options: -# admin - only the admin has write right -# all - everybody has write right -# user - the admin and every user registered in the user db has write right -WikiAccess = admin - -# Search Profiles -# we will support different search profiles -# this is currently only a single default profile -# If this profile setting is empty, a hard-coded profile from plasmaSearchRanking is used -search.ranking.rwi.profile = -search.ranking.solr.boost.tmp2= -search.ranking.solr.doubledetection.minlength=3 -search.ranking.solr.doubledetection.quantrate=0.5f - -#optional extern thumbnail program. 
-#the program must accept the invocation PROGRAM http://url /path/to/filename -thumbnailProgram = - -# settings for the peer's local robots.txt -# the following restrictions are possible (comma-separated): -# - all : entire domain is disallowed -# - blog : the blog-pages -# - bookmarks : the bookmark-page -# - dirs : all directories in htroot (standard setting, as there is no usable information in) -# - fileshare : all files in the peer's file share (DATA/HTDOCS/share) -# - homepage : all files on the peer's home page (DATA/HTDOCS/www) -# - locked : all servlets ending on '_p.*' (standard setting, as robots would need a password to access them anyways) -# - news : the news-page -# - network : the network-pages -# - status : peer's status page -# - surftips : the surftips-page -# - wiki : the wiki-page -httpd.robots.txt = locked,dirs,bookmarks,network,news,status,profile - -# class to use for parsing wikicode -wikiParser.class = de.anomic.data.wikiCode - -# settings for automatic deletion of old entries in passive and potential seed-db -# time means max time (in days) a peer may not have been seen before it is deleted -routing.deleteOldSeeds.permission = true -routing.deleteOldSeeds.time = 30 - -# options to remember the default search engines when using the search compare features -compare_yacy.left = YaCy -compare_yacy.right = metager.de - -# minimum free disk space for crawling (MiB) -disk.free = 3000 -# minimum for DHT -disk.free.hardlimit = 1000 - -# minimum memory to accept dht-in (MiB) -memory.acceptDHTabove = 50 -memory.disabledDHT = false - -# wether using standard memory strategy - or try generation memory strategy -memory.standardStrategy = true - -# setting if execution of CGI files is allowed or not -cgi.allow = false -cgi.suffixes = cgi,pl - -# content integration settings -content.phpbb3.urlstub = http:/// -content.phpbb3.dbtype = mysql -content.phpbb3.dbhost = localhost -content.phpbb3.dbport = 3306 -content.phpbb3.dbname = forum -content.phpbb3.tableprefix = phpbb_ -content.phpbb3.dbuser = notroot -content.phpbb3.dbpw = joshua -content.phpbb3.ppf = 1000 -content.phpbb3.dumpfile = - -# search engine teaser: an about box in search results -# this is only shown, if the about.body is filled -about.headline = -about.body = - -# search heuristics -heuristic.site = false -heuristic.blekko = false -heuristic.twitter = false -heuristic.searchresults = false -heuristic.searchresults.crawlglobal = false - -# colours for generic design -color_background = #FFFFFF -color_text = #18294A -color_legend = #65AC2A -color_tableheader = #5090D0 -color_tableitem = #DEE6F3 -color_tableitem2 = #ECF1F8 -color_tablebottom = #FFCCCC -color_borderline = #888888 -color_signbad = #990000 -color_signgood = #009900 -color_signother = #000099 -color_searchheadline = #2200CC -color_searchurl = #008000 -color_searchurlhover = #008000 - - -# federated index storage and federated search functionality -# federated search means that other search engines may be used together with the built-in indexing. -# each federated search may be able to be used as remote indexing service and/or as remote search service. -# a typical use case for a federated search is a concurrent search from opensearch sources. -# a typical use case for a remote indexing service is a remote solr index. YaCy supports remote solr indexes. 
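
A remote Solr index as mentioned here is filled over plain HTTP against Solr's update handler; the federated.service.solr.indexing.* settings below name the endpoint and a commit-within interval. A rough illustration of such a push, assuming a Solr instance is reachable at the default URL and using placeholder field names rather than the scheme named in federated.service.solr.indexing.schemefile (this is not YaCy's Solr connector):

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.charset.StandardCharsets;

    public class SolrPush {
        public static void main(String[] args) throws Exception {
            // endpoint and commitWithin taken from the defaults below; field names are placeholders
            URL update = new URL("http://127.0.0.1:8983/solr/update");
            String xml = "<add commitWithin=\"180000\">"
                       + "<doc><field name=\"id\">http://example.org/</field>"
                       + "<field name=\"title\">example</field></doc></add>";
            HttpURLConnection con = (HttpURLConnection) update.openConnection();
            con.setDoOutput(true);
            con.setRequestMethod("POST");
            con.setRequestProperty("Content-Type", "text/xml; charset=UTF-8");
            try (OutputStream out = con.getOutputStream()) {
                out.write(xml.getBytes(StandardCharsets.UTF_8));
            }
            System.out.println("Solr answered HTTP " + con.getResponseCode());
        }
    }
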
- -# solr indexes can be filled if enabled is set to true -# the remote index scheme is the same as produced by the SolrCell; see http://wiki.apache.org/solr/ExtractingRequestHandler -# because this default scheme is used the default example scheme can be used as solr configuration -# to use this, do the following: -# - set federated.service.solr.indexing.enabled = true -# - download solr from http://www.apache.org/dyn/closer.cgi/lucene/solr/ -# - extract the solr (3.1) package, 'cd example' and start solr with 'java -jar start.jar' -# - start yacy and then start a crawler. The crawler will fill both, YaCy and solr indexes. -# - to check whats in solr after indexing, open http://localhost:8983/solr/admin/ -federated.service.solr.indexing.enabled = false -federated.service.solr.indexing.url = http://127.0.0.1:8983/solr -federated.service.solr.indexing.commitWithinMs = 180000 -federated.service.solr.indexing.sharding = MODULO_HOST_MD5 -federated.service.solr.indexing.schemefile = solr.keys.default.list -# the lazy attribute causes that fields containing "" or 0 are not added and not written -federated.service.solr.indexing.lazy = true - -# temporary definition of backend services to use. -# After the migration a rwi+solr combination is used, the solr contains the content of the previously used metadata-db. -# To get a handle for a migration, these values are defined as temporary, if the migration starts the values are renamed -# and defined with different default values. -# The citation service is used for ranking; this is a reverse linking index. It should be on before and after the migration. -# It can be switched off if only a remote solr index is used. -core.service.fulltext = true -core.service.rwi.tmp = true -core.service.citation.tmp = true - -# RDF triplestore settings -triplestore.persistent = true - -# Augmentation settings -parserAugmentation = false -parserAugmentation.RDFa = false -proxyAugmentation = false -augmentation.reflect = false -augmentation.addDoctype = false -augmentation.reparse = false - -# Content control settings -contentcontrol.enabled = false -contentcontrol.bookmarklist = contentcontrol -contentcontrol.mandatoryfilterlist = yacy -contentcontrol.smwimport.enabled = false -contentcontrol.smwimport.baseurl = -contentcontrol.smwimport.purgelistoninit = true -contentcontrol.smwimport.targetlist = contentcontrol -contentcontrol.smwimport.defaultcategory = yacy - -# Interaction settings -interaction.enabled = false -interaction.target = yacy - -interaction.feedback.enabled = true -interaction.feedback.url = -interaction.feedback.accept = false -interaction.usertracking.enabled = true -interaction.addcontent.enabled = false -interaction.userlogon.enabled = false -interaction.approvelist.enabled = false -interaction.suggestrejected.enabled = false -interaction.overlayinteraction.enabled = false - -interaction.globalmenu.enabled = true -interaction.portalconfigbutton.enabled = true -interaction.crawltraces.enabled = false -interaction.userselfregistration.enabled = false -interaction.forcebookmarkimport = -interaction.visiblecategories = all - -interaction.dontimportbookmarks = - -interaction.autocrawler.enabled = false -interaction.autocrawler.domainfilter = .* -interaction.autocrawler.categoryfilter = .* - -# host browser settings -browser.autoload = false -browser.load4everyone = false +### +### YaCy Init File +### +# These properties will be loaded upon installation. +# They are used only once for set-up. 
+# If you make changes to this file and want these to make any effect, +# you must delete the yacy.conf file in DATA/SETTINGS + +# ---------------------------------------------------------------------------- +# the HTTP service configurations + +# port number where the server should bind to +port = 8090 + +# prefix for new default peer names +peernameprefix=_anon + +# use UPnP [true/false] +upnp.enabled = true +# remote host on UPnP device (for more than one connection) +upnp.remoteHost = + +#sometimes you may want yacy to bind to another port, than the one reachable from outside. +#then set bindPort to the port yacy should bind on, and port to the port, visible from outside +#to run yacy on port 8090, reachable from port 80, set bindPort=8090, port=80 and use +#iptables -t nat -A PREROUTING -p tcp -s 192.168.24.0/16 --dport 80 -j DNAT --to 192.168.24.1:8090 +#(of course you need to customize the ips) +bindPort = + +# SSL support: +# +# For a German manual see http://yacy-websuche.de/wiki/index.php/De:Interface%C3%9CberHTTPS +# +# English speaking user read below: +# +# With this you can access your peer using https://localhost:8090 +# +# There are two possibilities to specify which certificate should +# be used by YaCy. +# +# 1) Create a new certificate: +# +# *) For testing purposes, you can create a keystore with a self-signed certificate, +# using the following command: +# C:\> keytool -keystore mySrvKeystore -genkey -keyalg RSA -alias mycert +# +# *) Then configure the keyStoreXXXX properties accordingly, e.g. +# keyStore = c:/yacy/DATA/SETTINGS/mySrvKeystore +# keyStorePassword = mypwd +# +# 2) Import an existing certificate: +# +# Alternatively you can import an existing certificate in pkcs12 format into +# the keystore. +# +# This can be done by setting the pkcs12XXX properties accordingly, e.g. +# pkcs12ImportFile = c:/temp/keystore.pkcs12 +# pkcs12ImportPwd = test +# +# If the property keyStore is not specified, then a new keystore file +# DATA/SETTINGS/myPeerKeystore will be created. + +keyStore = +keyStorePassword = +pkcs12ImportFile = +pkcs12ImportPwd = + +# property that collects the names of all servlets that had been used so far +# that is used to track if the user has already done some configuration steps +# if the used missed configuration steps that should be done, then a help system +# is possible which leads the used based on the list of servlets that had been used +# the list distinguishes called and submitted servlets +server.servlets.submitted = + +# server tracking: maximum time a track entry is hold in the internal cache +# value is in milliseconds, default is one hour +server.maxTrackingTime = 3600000 + +# maximum number of tracks per host +server.maxTrackingCount = 1000 + +# maximum number of hosts that are tracked +server.maxTrackingHostCount = 100 + +# maximum file sizes: since some users experience problems with too large files +# the file size of database files can be limited. Larger files can be used to get a +# better IO performance and to use less RAM; however, if the size must be limited +# because of limitations of the file system, the maximum size can be set here +filesize.max.win = 2147483647 +filesize.max.other = 8589934591 + +# Network Definition +# There can be separate YaCy networks, and managed sub-groups of the general network. +# The essentials of the network definition are attached in separate property files. +# The property here can also be a url where the definition can be loaded. 
+# In case of privately managed networks, this configuration must be changed BEFORE it is released +# to the members of the separated network peers. +network.unit.definition = defaults/yacy.network.freeworld.unit +#network.unit.definition = defaults/yacy.network.intranet.unit + +# distinguish intranet/internet IPs: +# if this setting is set to true, then only URL-Hashes with 'intranet'-Flag is created, even if the +# url is in the internet. This can be done to enhance the crawling speed dramatically since a DNS-lookup +# to check if a host is in the internet oder an intranet can be omited. +# This option is only valid if the network.unit.domain property is set to 'any' +network.unit.domain.nocheck = false + +# in addition to non-dht networks a client may have its own agent name +# this option is only used if the value is non-empty and network.unit.dht = false +# that means it is not usable in YaCy p2p-configurations, only in private portal configurations +network.unit.tenant.agent = + +# Update process properties +# The update server location is given in the network.unit.definition, +# but the settings for update processing and cycles are individual. +# the update process can be either 'manual' (no automatic lookup for new versions), +# 'guided' (automatic lookup, but user is asked before update is performed', +# or 'auto' (whenever an update is available, the update is loaded and installed) +update.process = manual +# the cycle value applies only if the process is automatic or guided. The value means hours. +# There is currently a fixed minimum number of hours of 24 hours for updates +update.cycle = 168 +# a version number blacklist can restrict automatic or guided updates to a specific +# range of version numbers. The restriction is done with a blacklist (standard regexpr) +# It is recommended to set this list to low developer version numbers +update.blacklist = ...[123] +# a update can also restricted with a concept property, which can decide if an +# update is only valid if it either is a main release or any release including new development releases +# Valid keywords are 'main' and 'any' +update.concept = any +# the following values are set automatically: +# the lookup time when the last time a lookup to the network update server(s) where done +update.time.lookup = 0 +# the download time when the last time a release was downloaded +update.time.download = 0 +# the deploy time when the last update was done; milliseconds since epoch +update.time.deploy = 0 +# delete old downloaded files after this amount of days to free disk space +# the latest release is always kept +update.deleteOld = 30 +# only install sign files +update.onlySignedFiles = 1 + +# restart-option +# a peer can be re-started periodically +# restart.process can be either 'off' (no automatic restart) or 'time' (time- rule-based, see below) +restart.process = off +# the restart.cycle is the number of hours that must pass before a restart is done +restart.cycle = 20 +# the restart.hour is a pattern that must match with the hour string (two-digit, 24h) +# when the restart should be performed +restart.hour = 03 +# the following values are set automatically +restart.time = 0 + +# clusters within a network: +# every network can have an unlimited number of clusters. Clusters may be also completely +# sealed and have no connection to other peers. When a cluster does not use the +# p2p protocol and the bootstraping mechanism to contact other peers, we call them +# Robinson peers. 
They can appear in different 'visibilities': +# - privatepeer: no connection and no data exchange to any other peer +# - privatecluster: connections only to self-defined addresses (other peers in same mode) +# - publiccluster: like privatecluster, but visible and searcheable by public p2p nodes +# - publicpeer: a single peer without cluster connection, but visible for p2p nodes +# all public robinson peers should use a peer tag string to be searcheable if in the +# search request these tags appear +cluster.mode=publicpeer +cluster.peers.yacydomain=localpeer.yacy +cluster.peers.ipport=localhost:8090 + +# bootstrapLoadTimeout +# this is the time-out for loading of the seedlist files during bootstraping +# the time should not be too long, since loading of the seedlist is not parallelized +# and a not successful loading of a seed file may prevent a peer from becoming +# a (at least) junior status. If the time-out is too short, there is the danger +# that the peer stays in virgin mode +bootstrapLoadTimeout = 6000 + +# time-out of client control socket in milliseconds +# since this applies only to the client-proxy connection, +# it can be rather short +# milliseconds +clientTimeout = 10000 + +# maximal number of httpd sessions +# a client may open several connections at once, and the httpdMaxBusySessions value sets +# a limit on the number of concurrent connections +httpdMaxBusySessions = 200 + +# default root path for the file server +# may be overridden by the htdocs parameter +# users shall be encouraged to use the htdocs path for individual content, +# not this path defined here +htRootPath = htroot + +# the htroot path +# root path for the httpd file server +htDefaultPath=htroot + +# individual htroot folder +# every user may publicize her/his own web pages +# these pages shall be placed in the path defined here +# the htdocs path shares its content with the htroot path +htDocsPath = DATA/HTDOCS + +# alternative path for the repository path of the web server: the URL +# http://localhost:8090/repository +# points to DATA/HTDOCS/repository, but can be altered with this repository path +# hint: the repository path is the default path for intranet indexing. The easiest ways +# to do a indexing of the local storage system is to set a path here for the repository +# that points to the root path of the files that shall be indexed +repositoryPath=DATA/HTDOCS/repository + +# the default files (typically index.html), if no file name is given +# The complete path to this file is created by combination with the rootPath +# you can set a list of defaults, separated by comma +# the first one is preferred +defaultFiles = index.html,index.htm,default.html,search.html,console.html,control.html,welcome.html,wiki.html,forum.html,blog.html,email.html,content.html,monitor.html,share.html,dir.html,readme.txt + +# locale-options: YaCy supports localization. 
+# Web pages for special languages are located in the htLocalePath +# The htLocaleLang defines a list of language options as / +# the must exist as sub-path to htLocalePath +# the htLocaleSelection selects from the given locales, value=one-of- +locale.source=locales +locale.translated_html=DATA/LOCALE/htroot +locale.language=default + +# virtual host for httpdFileServlet access +# for example http:/// shall access the file servlet and +# return the defaultFile at rootPath +# either way, http:/// denotes the same as http://localhost:/ +# for the preconfigured value 'localpeer', the URL is: +# http://localpeer/ +fileHost = localpeer + +# specify the path to the MIME matching file table +mimeTable = defaults/httpd.mime + +# specify the path to the sessionid name file +sessionidNamesFile = defaults/sessionid.names + +# a path to the file cache, used for the internal proxy and as crawl buffer +# This will be used if the server is addressed as a proxy +proxyCache = DATA/HTCACHE + +# the maximum disc cache size for files in Cache in megabytes +# default: 4 Gigabyte +proxyCacheSize = 4096 + +# a path to the surrogate input directory +surrogates.in = DATA/SURROGATES/in + +# a path to the surrogate output directory +surrogates.out = DATA/SURROGATES/out + +# a path to the dictionaries directory +# this directory also contains subdirectories for input sources, the did-you-mean function and other +dictionaries = DATA/DICTIONARIES + +# storage place for new releases +releases = DATA/RELEASE + +# time limits for the crawler: +# these times (milliseconds) are the shortest times for an access of the crawler to the same domain +# the crawler may read files faster than that, but never from the same domain faster than these time intervals +# a delta of 500 milliseconds means that no more than two files are taken from the same server +# there is a hard-coded limit which prevents that the used time is shorter that these default times +# the time-limits are distinguished for local and global crawls: there is no limit for an intranet-crawl. +minimumLocalDelta = 0 +minimumGlobalDelta = 500 + +# the following mime-types are a blacklist for indexing: +# parser.mime.deny: specifies mime-types that shall not be indexed +parser.mime.deny= +parser.extensions.deny= +parser.enableAudioTags=false + +# Promotion Strings +# These strings appear in the Web Mask of the YACY search client +# Set these Strings to cusomize your peer and give any message to +# other peer users +promoteSearchPageGreeting = P2P Web Search +# if the following property is set to true, the network name is used as greeting +promoteSearchPageGreeting.useNetworkName = false +# the following attributes can be used to define a custom image and home page on the search page +promoteSearchPageGreeting.homepage = http://yacy.net +promoteSearchPageGreeting.largeImage = /env/grafics/YaCyLogo_120ppi.png +promoteSearchPageGreeting.smallImage = /env/grafics/YaCyLogo_60ppi.png + +# the path to the public reverse word index for text files (web pages) +# the primary path is relative to the data root, the secondary path is an absolute path +# when the secondary path should be equal to the primary, it must be declared empty +indexPrimaryPath=DATA/INDEX + +# the path to the LISTS files. Most lists are used to filter web content +listsPath=DATA/LISTS + +# path to additional databases, like messages, blog data and bookmarks +workPath=DATA/WORK + +# the path to the SKINS files. 
+skinPath=DATA/SKINS + +# the yellow-list; URL's elements +# (the core of an URL; like 'yahoo' in 'de.yahoo.com') +# appearing in this list will not get a manipulated user agent string +proxyYellowList=yacy.yellow + +# the black-list; URLs appearing in this list will not be loaded; +# instead always a 404 is returned +# all these files will be placed in the listsPath +BlackLists.Shared=url.default.black +BlackLists.DefaultList=url.default.black + +#these are not needed as default. they just keep the values from being deleted ... +proxy.BlackLists=url.default.black +crawler.BlackLists=url.default.black +dht.BlackLists=url.default.black +search.BlackLists=url.default.black +surftips.BlackLists=url.default.black +news.BlackLists=url.default.black + +proxyCookieBlackList=cookie.default.black +proxyCookieWhiteList=cookie.default.black + +# the blue-list; +# no search result is locally presented that has any word of the bluelist +# in the search words, the URL or the URL's description +plasmaBlueList=yacy.blue + +# this proxy may in turn again access another proxy +# if you wish to do that, specify it here +# if you want to switch on the proxy use, set remoteProxyUse=true +# remoteProxyNoProxy is a no-proxy pattern list for the remote proxy +remoteProxyUse=false +remoteProxyUse4Yacy=true +remoteProxyUse4SSL=true + +remoteProxyHost=192.168.2.2 +remoteProxyPort=4239 +remoteProxyUser= +remoteProxyPwd= + +remoteProxyNoProxy=10\..*,127\..*,172\.(1[6-9]|2[0-9]|3[0-1])\..*,169\.254\..*,192\.168\..*,localhost,0:0:0:0:0:0:0:1 + +# the proxy may filter the content of transferred web pages +# the bluelist removes specific keywords from web pages +proxyBlueList=yacy.blue + +# security settings +# we provide proxy and server security through a 2-stage security gate: +# 1st stage: firewall-like access control through an IP filter for clients +# 2nd stage: password settings for proxy, server and server administrators +# by default, these settings are weak to simplify set-up and testing +# every user/administrator shall be encouraged to change these settings +# you can also change them online during run-time on +# http://localhost:8090/ + +# proxyClient: client-ip's that may connect to the proxy for proxy service +# if several ip's are allowed then they must be separated by a ',' +# regular expressions may be used +#proxyClient=192.168.0.4 +proxyClient=localhost,127\.0\.0\.1,192\.168\..*,10\..*,0:0:0:0:0:0:0:1.* + +# YaCyHop: allow public usage of proxy for yacy-protocol +# this enables usage of the internal http proxy for everyone, +# if the file path starts with /yacy/ +# This is used to enable anonymization of yacy protocol requests +# Instead of asking a remote peer directly, a peer in between is asked +# to prevent that the asked peer knows which peer asks. +YaCyHop=true + +# serverClient: client-ip's that may connect to the web server, +# thus are allowed to use the search service +# if you set this to another value, search requests from others +# are blocked, but you will also be blocked from using others' +# search services. +serverClient=* + +# use_proxyAccounts: set to true to restrict proxy-access to some identified users. +#use User_p.html to create some Users. +use_proxyAccounts=true + +# adminAccount: a user:password - pair for administration of +# settings through the web interface +# should be set to a secret.
By default it is without a password +# but you are encouraged to set it to another value on the page +# http://localhost:8090/ConfigAccounts_p.html +#adminAccount=admin:mysecretpassword +adminAccount= +adminAccountBase64MD5= + +# special access handling for users from localhost: +# access from localhost may be granted with administration authority +# if this flag is set. It is set to true by default to make usage of YaCy easy +# if you use YaCy on a headless server, you should set this to false +# or configure this on http://localhost:8090/ConfigAccounts_p.html +# during the first 10 minutes of operation of YaCy; +# if the admin account password is still empty after 10 minutes a random +# password is generated an access is then ONLY from localhost, which will cause +# inaccessibility for installations on headless servers. +adminAccountForLocalhost=true + +# if you are running a principal peer, you must update the following variables +# The upload method that should be used to upload the seed-list file to +# a public accessible webserver where it can be loaded by other peers. +# +# You can set the seedUploadMethod-Property to +# - None +# - Ftp +# - File +# - Scp (only if you have installed the optional addon) +# +seedUploadMethod=none + +# This is the most common method to upload the seed-list +# +# This is an ftp account with all relevant information. +# The update is only made if there had been changes in between. +seedFTPServer= +seedFTPAccount= +seedFTPPassword= +seedFTPPath= + +# alternatively to an FTP account, a peer can also become a principal peer +# if the seed-list can be generated as a file and that file is also accessible from +# the internet. In this case, omit any ftp settings and set this path here. +# if this path stays empty, an ftp account is considered +# however, you must always set a seedURL because it is used to check if the +# file is actually accessible from the internet +seedFilePath= + +# Settings needed to upload the seed-list file via scp +# +# Please note that this upload method can only be used if you have installed +# this optional upload method. +seedScpServer= +seedScpServerPort= +seedScpAccount= +seedScpPassword= +seedScpPath= + +# every peer periodically scans for other peers. you can set the time +# of the period here (minutes) +peerCycle=2 + +# Debug mode for YACY network: this will trigger that also local ip's are +# accepted as peer addresses +yacyDebugMode=false + +#staticIP if you have a static IP, you can use this setting +staticIP= + +# each time YaCy starts up, it can trigger the local browser to show the +# status page. This is active by default, to make it easier for first-time +# users to understand what this application does. You can disable browser +# pop-up here or set a different start page, like the search page +browserPopUpTrigger=true +browserPopUpPage=index.html + +# a forward page can be given for the index.html page +# when a user accesses the index.html page, he/she is forwarded to the page +# as given by indexForward. This is by default not defined which means 'no forward' +indexForward = + +# defines if the YaCy icon appears in the system tray on supported platforms +tray.icon.enabled=true +tray.icon.force=false +tray.icon.label=YaCy +tray.menu.enabled=true + +# index sharing attributes: by default, sharing is on. 
+# If you want to use YaCy only for local indexing (robinson mode), +# you may switch this off +allowDistributeIndex=true +allowDistributeIndexWhileCrawling=false +allowDistributeIndexWhileIndexing=true +allowReceiveIndex=true +allowUnlimitedReceiveIndexFrom= +indexReceiveBlockBlacklist=true + +# the frequency is the number of links per minute, that the peer allowes +# _every_ other peer to send to this peer +defaultWordReceiveFrequency=100 +defaultLinkReceiveFrequency=30 +# the default may be overridden for each peer individually, these +# settings are only available through the online interface + +# prefetch parameters +# the prefetch depth assigns a specific depth to the prefetch mechanism +# prefetch of 0 means no prefetch; a prefetch of 1 means to prefetch all +# embedded URLs, but since embedded image links are loaded by the browser +# this means that only embedded anchors are prefetched additionally +# a prefetch of 2 would result in loading of all images and anchor pages +# of all embedded anchors. Be careful with this value, since even a prefetch +# of 2 would result in hundreds of prefetched URLs for each single proxy fill. +proxyPrefetchDepth=0 +proxyStoreHTCache=true +proxyIndexingRemote=false +proxyIndexingLocalText=true +proxyIndexingLocalMedia=true + +# proxy usage only for .yacy-Domains for autoconfig +proxyYacyOnly=false + +# enable proxy via url (/proxy.html?url=http://yacy.net) +proxyURL=false +proxyURL.access=127.0.0.1,0:0:0:0:0:0:0:1 +# which urls to rewrite to /proxy.html?url=x (values: all, domainlist) +proxyURL.rewriteURLs=domainlist +proxyURL.useforresults=false + +# From the 'IndexCreate' menu point you can also define a crawling start point. +# The crawling works the same way as the prefetch, but it is possible to +# assign a different crawling depth. +# Be careful with this number. Consider a branching factor of average 20; +# A prefetch-depth of 8 would index 25.600.000.000 pages, maybe the whole WWW. +crawlingDepth=3 +crawlingDirectDocByURL=true +crawlingIfOlder=-1 +crawlingDomFilterDepth=-1 +crawlingDomMaxPages=-1 +indexText=true +indexMedia=true + +# Filter for crawling; may be used to restrict a crawl to a specific domain +# URLs are only indexed and further crawled if they match this filter +crawlingFilter=.* +crawlingQ=false +storeHTCache=true +storeTXCache=true + +# peers may initiate remote crawling tasks. +# every peer may allow or disallow to be used as crawling-peer; +# you can also set a maximum crawl depth that can be requested or accepted +# order=parameters for requester; response=parameters for responder +# these values apply only for senior-senior - communication +# The delay value is number of seconds bewteen two separate orders +# crawlOrder: default value for remote crawl starts +# crawlResponse: set to true if a peer should retrieve remote crawl urls from other peers +crawlOrder=true +crawlOrderDepth=0 +crawlResponse=false +crawlResponseDepth=0 + +# indexing-exclusion - rules +# There rules are important to reduce the number of words that are indexed +# We distinguish three different sets of stop-words: +# static - excludes all words given in the file yacy.stopwords from indexing, +# dynamic - excludes all words from indexing which are listed by statistic rules, +# parental - excludes all words from indexing which had been indexed in the parent web page. 
+xsstopw=true +xdstopw=true +xpstopw=true + +# Topwords filtering +# If set to true, all stopwords (stopwords.yacy) are filtered from the topwords +# Change to false if requesting hits from peers with modified stopwords-file and using the unchanged client-version +filterOutStopwordsFromTopwords=true + +# crawling steering: must-match/must-not-match +crawlingIPMustMatch=.* +crawlingIPMustNotMatch= +# the default country codes are all codes for countries in Europe +crawlingCountryMustMatch=AD,AL,AT,BA,BE,BG,BY,CH,CY,CZ,DE,DK,EE,ES,FI,FO,FR,GG,GI,GR,HR,HU,IE,IM,IS,IT,JE,LI,LT,LU,LV,MC,MD,MK,MT,NL,NO,PL,PT,RO,RU,SE,SI,SJ,SK,SM,TR,UA,UK,VA,YU + +# collections for index data separation +# these collections can either be used to produce search tenants. +# The collection is used in the site-parameter in the GSA interface. +# Collections are assigned during crawl-time and defined in the crawl start. +# The YaCyScheme field collection_sxt must be switched on to use this field. +collection=user + +# performance-settings +# delay-times for permanent loops (milliseconds) +# the idlesleep is the pause that an proces sleeps if the last call to the +# process job was without execution of anything; +# the busysleep is the pause after a full job execution +# the prereq-value is a memory pre-requisite: that much bytes must +# be available/free in the heap; othervise the loop is not executed +# and another idlesleep is performed +20_dhtdistribution_idlesleep=30000 +20_dhtdistribution_busysleep=15000 +20_dhtdistribution_memprereq=12582912 +30_peerping_idlesleep=30000 +30_peerping_busysleep=30000 +30_peerping_memprereq=2097152 +40_peerseedcycle_idlesleep=1800000 +40_peerseedcycle_busysleep=1200000 +40_peerseedcycle_memprereq=4194304 +50_localcrawl_idlesleep=2000 +50_localcrawl_busysleep=20 +50_localcrawl_memprereq=12582912 +50_localcrawl_isPaused=false +60_remotecrawlloader_idlesleep=4000 +60_remotecrawlloader_busysleep=800 +60_remotecrawlloader_memprereq=12582912 +60_remotecrawlloader_isPaused=false +62_remotetriggeredcrawl_idlesleep=2000 +62_remotetriggeredcrawl_busysleep=200 +62_remotetriggeredcrawl_memprereq=12582912 +62_remotetriggeredcrawl_isPaused=false +70_surrogates_idlesleep=10000 +70_surrogates_busysleep=0 +70_surrogates_memprereq=12582912 +90_cleanup_idlesleep=300000 +90_cleanup_busysleep=300000 +90_cleanup_memprereq=0 + +# additional attributes: +# performanceIO is a percent-value. a value of 10 means, that 10% of the busysleep time +# is used to flush the RAM cache, which is the major part of the IO in YaCy +performanceProfile=defaults/yacy.init +performanceSpeed=100 +performanceIO=10 + +# cleanup-process: +# properties for tasks that are performed during cleanup +cleanup.deletionProcessedNews = true +cleanup.deletionPublishedNews = true +cleanup.failedSearchURLtimeout = 86400000 + + +# default memory settings for startup of yacy +# is valid in unix/shell and windows environments but +# not for first startup of YaCy + +# -Xmx and -Xms maximum/init Java heap size +# if a high performance for large search indexes is wanted, then setting the values to equal number is recommended +# if YaCy shall be nice in not-only-yacy environments, then the Xms value may be lower +javastart_Xmx=Xmx600m +javastart_Xms=Xms90m + +# YaCy is able to use RAM copies of database tables. This needs a lot of RAM. 
+# To switch on copying of file tables int RAM, there must be enough memory +# The memory that is available at startup time is used to switch the feature on +# The tableCachingLimit is the amount of free RAM at startup time to switch on the feature +tableCachingLimit=419430400 + +# some java versions may be limited to a specific array size +# of 134217727 entries. To prevent that tables of that size are generated, +# set this property to false +# If you want to have better performance and switch ramcopy on, try also to +# set this property to true +# this value is automatically set to true, if more than two gigabyte is available +exceed134217727=false + +# priority of the yacy-process +# is valid in unix/shell and windows environments but +# not for first startup of YaCy +# UNIX: corresponds to the nice-level +# WIN: -20=realtime;-15=high;-10=above;0=normal;10=below;20=low +javastart_priority=10 + +# performance properties for the word index cache +# wordCacheMaxLow/High is the number of word indexes that shall be held in the +# ram cache during indexing. If you want to increase indexing speed, increase this +# value i.e. up to one million, but increase also the memory limit to a minimum of 2GB +wordCacheMaxCount = 50000 + +# Specifies if yacy can be used as transparent http proxy. +# +# Please note that you also have to reconfigure your firewall +# before you can use yacy as transparent proxy. On linux this +# can be done like this: +# iptables -t nat -A PREROUTING -p tcp -s 192.168.0.0/16 \ +# --dport 80 -j DNAT --to 192.168.0.1:8090 +# +# With this iptables filter listed above all http traffic that +# comes from your private network (in this case 192.168.0.0) +# and goes to any webserver listening on port 80 will be forwarded +# by the firewall to yacy running on port 8090 (192.168.0.1:8090) +isTransparentProxy=false + +# Specifies if yacy should use the http connection keep-alive feature +connectionKeepAliveSupport=true + +# Specifies the timeout the proxy sould use +proxy.clientTimeout = 30000 + +# Specifies if the proxy should send the via header according to RFC +proxy.sendViaHeader=true + +# Specifies if the proxy should send the X-Forwarded-For header +proxy.sendXForwardedForHeader=true + +# Enable cookie monitoring +proxy.monitorCookies=false + +# msgForwarding: Specifies if yacy should forward received messages via +# email to the configured email address +msgForwardingEnabled=false +msgForwardingCmd=/usr/sbin/sendmail +msgForwardingTo=root@localhost + +#crawlPause: delay time after specific functions before crawling is resumed +crawlPause.proxy=10 +crawlPause.localsearch=50 +crawlPause.remotesearch=10 + +# Some configuration values for the crawler +crawler.clientTimeout=30000 + +# http crawler specific settings; size in bytes +crawler.http.accept=text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8 +crawler.http.acceptEncoding=gzip +crawler.http.acceptLanguage=en-us,en;q=0.5 +crawler.http.acceptCharset=ISO-8859-1,utf-8;q=0.7,*;q=0.7 +crawler.http.maxFileSize=10485760 +crawler.http.FollowRedirects=true +crawler.http.RecordRedirects=false + +# ftp crawler specific settings; size in bytes +crawler.ftp.maxFileSize=10485760 + +# smb crawler specific settings: maximum size +crawler.smb.maxFileSize=100000000 + +# smb crawler specific settings: maximum size +crawler.file.maxFileSize=100000000 + +# maximum number of crawler threads +crawler.MaxActiveThreads = 200 + +# maximum size of indexing queue +indexer.slots = 100 + +# maximum size of stacker queue +stacker.slots = 2000 
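For illustration only (not part of this patch): the tuning hints above (equal Xmx/Xms values for large indexes, a larger word cache together with a bigger heap, and exceed134217727 for RAM table copies) could be combined roughly as follows on a machine with plenty of RAM; the concrete numbers are assumptions, not shipped defaults:
# hypothetical tuning sketch for a peer with ~4 GB RAM (illustrative values only)
javastart_Xmx=Xmx2048m
javastart_Xms=Xms2048m
# a larger word cache speeds up indexing, but requires the larger heap above
wordCacheMaxCount = 200000
# allow RAM copies of file tables and large arrays when enough memory is free
exceed134217727=true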
+ +# search options: show advanced options on main search page +search.options = true + +# search domains. If set to false then that search is not available +search.text = true +search.image = true +search.audio = true +search.video = true +search.app = false + +# number of search results displayed by default +search.items = 10 + +# target for search results; this is the href target attribute inside every search result link +# possible values: +# "_blank" (new window), "_self" (same window), "_parent" (the parent frame of a frameset), +# "_top" (top of all frames), "searchresult" (a default custom page name for search results) +# a special pattern can be given for exceptions to the default target according to urls +search.target = _self +search.target.special = _self +search.target.special.pattern = + +# search result lines may show additional information for each search hit +# these information pieces may be switched on or off +search.result.show.date = true +search.result.show.size = false +search.result.show.metadata = false +search.result.show.parser = false +search.result.show.pictures = false +search.result.show.cache = true +search.result.show.proxy = false +search.result.show.hostbrowser = true +search.result.show.tags = false + +# search navigators: comma-separated list of default values for search navigation. +# can be temporarily different if the search string is given with different navigation values +# assigning no value(s) means that no navigation is shown +search.navigation=hosts,authors,namespace,topics,filetype,protocol + +# search result verification and snippet fetch caching rules +# each search result can be verified by loading the link from the web +# this can be enhanced using a cache. In some cases it may be appropriate +# to not verify the link at all and not compute a snippet +# the possible cases are: +# nocache: no use of web cache, load all snippets online +# iffresh: use the cache if the cache exists and is fresh, otherwise load online +# ifexist: use the cache if the cache exists, otherwise load online +# cacheonly: never go online, use all content from cache. If no cache entry exists, +# consider content nevertheless as available and show result without snippet +# false: no link verification and no snippet generation: + all search results are valid without verification +search.verify = ifexist + +search.excludehosts= +search.excludehosth= + +# in case that a link verification fails then the corresponding index reference can be +# deleted to clean up the index. If this property is set then failed index verification in +# the cases of nocache, iffresh and ifexist causes an index deletion +search.verify.delete = true + +# remote search details +remotesearch.maxcount = 20 +remotesearch.maxtime = 1000 + +# specifies if yacy should set its own referer if no referer URL +# was set by the client. +useYacyReferer = false + +# allow only 443(https-port) for https-proxy?
+# if you want to tunnel other protocols, set to false +secureHttps = true + +# specifies if the httpdFileHandler should cache +# the template-files from the htroot directory +enableTemplateCache = true + +# specifies if the http post body should be transfered +# using content-encoding gzip during index transfer +# a) indexDistribution: which is done periodically if you have enabled +# Index Distribution via IndexControl_p.html +# b) indexTransfer: which can be used to transfer the whole index of a peer +# this can be started via IndexTransfer_p.html +# c) indexControl: which can be triggered manually via IndexControl_p.html to +# transfer a chosen subset of the peer index +indexDistribution.gzipBody = true +indexTransfer.gzipBody = true +indexControl.gzipBody = true + +# defining timeouts for index- transfer/distribution/control +indexControl.timeout = 60000 +indexDistribution.timeout = 60000 +indexTransfer.timeout = 120000 + +# defining max. allowed amount of open files during index- transfer/distribution +indexDistribution.maxOpenFiles = 800 +indexTransfer.maxOpenFiles = 800 + +# sizes for index distribution +indexDistribution.minChunkSize = 10 +indexDistribution.maxChunkSize = 1000 +indexDistribution.startChunkSize = 200 +indexDistribution.maxChunkFails = 1 + +# limit of references per term & blob to the younges of this value +# a value of <= 0 disables this feature (no limit) +# a value of e.g. 100000 can improve stability and reduce load while searching very popular words +index.maxReferences = 0 + +# Search sequence settings +# collection: +# time = time to get a RWI out of RAM cache, assortments and WORDS files +# count = maximum number of RWI-entries that shall be collected +# +# join: +# time = time to perform the join between all collected RWIs +# count = maximum number of entries that shall be joined +# +# presort: +# time = time to do a sort of the joined URL-records +# count = maximum number of entries that shall be pre-sorted +# +# urlfetch: +# time = time to fetch the real URLs from the LURL database +# count = maximum number of urls that shall be fetched +# +# postsort: +# time = time for final sort of URLs +# count = maximum number oof URLs that shall be retrieved during sort +# +# filter: +# time = time to filter out unwanted urls (like redundant urls) +# count = maximum number of urls that shall be filtered +# +# snippetfetch: +# time = time to fetch snippets for selected URLs +# count = maximum number of snipptes to be fetched +# +# all values are percent +# time-percent is the percent of total search time +# count-percent is the percent of total wanted urls in result +# we distinguish local and remote search times +searchProcessLocalTime_c = 44 +searchProcessLocalCount_c = 10000000 +searchProcessLocalTime_j = 8 +searchProcessLocalCount_j = 1000000 +searchProcessLocalTime_r = 8 +searchProcessLocalCount_r =100000 +searchProcessLocalTime_u = 20 +searchProcessLocalCount_u = 10000 +searchProcessLocalTime_o = 10 +searchProcessLocalCount_o = 100 +searchProcessLocalTime_f = 5 +searchProcessLocalCount_f = 100 +searchProcessLocalTime_s = 5 +searchProcessLocalCount_s = 30 + +searchProcessRemoteTime_c = 44 +searchProcessRemoteCount_c = 1000000 +searchProcessRemoteTime_j = 8 +searchProcessRemoteCount_j = 1000000 +searchProcessRemoteTime_r = 8 +searchProcessRemoteCount_r = 1000 +searchProcessRemoteTime_u = 20 +searchProcessRemoteCount_u = 1000 +searchProcessRemoteTime_o = 10 +searchProcessRemoteCount_o = 1000 +searchProcessRemoteTime_f = 5 +searchProcessRemoteCount_f = 100 
+searchProcessRemoteTime_s = 5 +searchProcessRemoteCount_s = 10 + +# timeouts for snippet fetching in ms +# timeout_text is for text-snippets, timeout_media for media, e.g. images +timeout_text = 10000 +timeout_media = 15000 + +# a list of domain name patterns that should not be cached by the httpc dns cache +httpc.nameCacheNoCachingPatterns = .*.ath.cx,.*.blogdns.*,.*.boldlygoingnowhere.org,.*.dnsalias.*,.*.dnsdojo.*,.*.dvrdns.org,.*.dyn-o-saur.com,.*.dynalias.*,.*.dyndns.*,.*.ftpaccess.cc,.*.game-host.org,.*.game-server.cc,.*.getmyip.com,.*.gotdns.*,.*.ham-radio-op.net,.*.hobby-site.com,.*.homedns.org,.*.homeftp.*,.*.homeip.net,.*.homelinux.*,.*.homeunix.*,.*.is-a-chef.*,.*.is-a-geek.*,.*.kicks-ass.*,.*.merseine.nu,.*.mine.nu,.*.myphotos.cc,.*.podzone.*,.*.scrapping.cc,.*.selfip.*,.*.servebbs.*,.*.serveftp.*,.*.servegame.org,.*.shacknet.nu + +#externalRedirectors +#squid Redirector compatible +externalRedirector= + +svnRevision=0 + +currentSkin=pdblue + +# flag to show if pages shall be usable for non-admin users +# this can be applied to the Surftips.html and yacysearch.html page +publicSurftips = true +publicSearchpage = true + +# flag to show if the top navigation bar shall be shown to all users +# if this is disabled, then the user must navigate manually from the search page +# to /Status.html to get the main memu bar back +publicTopmenu = true + +# flag if a small link to the administration sites on the right bottom of the index +# page should be shown to all users. +# you might want to enabled this, if publicTopmenu is disabled +publicAdministratorPi = false + +# Wiki access rights +# the built-in wiki system allows by default only that the administrator is allowed to make changes +# this can be changed. There are three options: +# admin - only the admin has write right +# all - everybody has write right +# user - the admin and every user registered in the user db has write right +WikiAccess = admin + +# Search Profiles +# we will support different search profiles +# this is currently only a single default profile +# If this profile setting is empty, a hard-coded profile from plasmaSearchRanking is used +search.ranking.rwi.profile = +search.ranking.solr.boost.tmp2= +search.ranking.solr.doubledetection.minlength=3 +search.ranking.solr.doubledetection.quantrate=0.5f + +#optional extern thumbnail program. 
+#the program must accept the invocation PROGRAM http://url /path/to/filename +thumbnailProgram = + +# settings for the peer's local robots.txt +# the following restrictions are possible (comma-separated): +# - all : entire domain is disallowed +# - blog : the blog-pages +# - bookmarks : the bookmark-page +# - dirs : all directories in htroot (standard setting, as there is no usable information in) +# - fileshare : all files in the peer's file share (DATA/HTDOCS/share) +# - homepage : all files on the peer's home page (DATA/HTDOCS/www) +# - locked : all servlets ending on '_p.*' (standard setting, as robots would need a password to access them anyways) +# - news : the news-page +# - network : the network-pages +# - status : peer's status page +# - surftips : the surftips-page +# - wiki : the wiki-page +httpd.robots.txt = locked,dirs,bookmarks,network,news,status,profile + +# class to use for parsing wikicode +wikiParser.class = de.anomic.data.wikiCode + +# settings for automatic deletion of old entries in passive and potential seed-db +# time means max time (in days) a peer may not have been seen before it is deleted +routing.deleteOldSeeds.permission = true +routing.deleteOldSeeds.time = 30 + +# options to remember the default search engines when using the search compare features +compare_yacy.left = YaCy +compare_yacy.right = metager.de + +# minimum free disk space for crawling (MiB) +disk.free = 3000 +# minimum for DHT +disk.free.hardlimit = 1000 + +# minimum memory to accept dht-in (MiB) +memory.acceptDHTabove = 50 +memory.disabledDHT = false + +# wether using standard memory strategy - or try generation memory strategy +memory.standardStrategy = true + +# setting if execution of CGI files is allowed or not +cgi.allow = false +cgi.suffixes = cgi,pl + +# content integration settings +content.phpbb3.urlstub = http:/// +content.phpbb3.dbtype = mysql +content.phpbb3.dbhost = localhost +content.phpbb3.dbport = 3306 +content.phpbb3.dbname = forum +content.phpbb3.tableprefix = phpbb_ +content.phpbb3.dbuser = notroot +content.phpbb3.dbpw = joshua +content.phpbb3.ppf = 1000 +content.phpbb3.dumpfile = + +# search engine teaser: an about box in search results +# this is only shown, if the about.body is filled +about.headline = +about.body = + +# search heuristics +heuristic.site = false +heuristic.blekko = false +heuristic.twitter = false +heuristic.searchresults = false +heuristic.searchresults.crawlglobal = false + +# colours for generic design +color_background = #FFFFFF +color_text = #18294A +color_legend = #65AC2A +color_tableheader = #5090D0 +color_tableitem = #DEE6F3 +color_tableitem2 = #ECF1F8 +color_tablebottom = #FFCCCC +color_borderline = #888888 +color_signbad = #990000 +color_signgood = #009900 +color_signother = #000099 +color_searchheadline = #2200CC +color_searchurl = #008000 +color_searchurlhover = #008000 + + +# federated index storage and federated search functionality +# federated search means that other search engines may be used together with the built-in indexing. +# each federated search may be able to be used as remote indexing service and/or as remote search service. +# a typical use case for a federated search is a concurrent search from opensearch sources. +# a typical use case for a remote indexing service is a remote solr index. YaCy supports remote solr indexes. 
+ +# solr indexes can be filled if enabled is set to true +# the remote index scheme is the same as produced by the SolrCell; see http://wiki.apache.org/solr/ExtractingRequestHandler +# because this default scheme is used the default example scheme can be used as solr configuration +# to use this, do the following: +# - set federated.service.solr.indexing.enabled = true +# - download solr from http://www.apache.org/dyn/closer.cgi/lucene/solr/ +# - extract the solr (3.1) package, 'cd example' and start solr with 'java -jar start.jar' +# - start yacy and then start a crawler. The crawler will fill both, YaCy and solr indexes. +# - to check whats in solr after indexing, open http://localhost:8983/solr/admin/ +federated.service.solr.indexing.enabled = false +federated.service.solr.indexing.url = http://127.0.0.1:8983/solr +federated.service.solr.indexing.commitWithinMs = 180000 +federated.service.solr.indexing.sharding = MODULO_HOST_MD5 +federated.service.solr.indexing.schemefile = solr.keys.default.list +# the lazy attribute causes that fields containing "" or 0 are not added and not written +federated.service.solr.indexing.lazy = true + +# temporary definition of backend services to use. +# After the migration a rwi+solr combination is used, the solr contains the content of the previously used metadata-db. +# To get a handle for a migration, these values are defined as temporary, if the migration starts the values are renamed +# and defined with different default values. +# The citation service is used for ranking; this is a reverse linking index. It should be on before and after the migration. +# It can be switched off if only a remote solr index is used. +core.service.fulltext = true +core.service.rwi.tmp = true +core.service.citation.tmp = true + +# RDF triplestore settings +triplestore.persistent = true + +# Augmentation settings +parserAugmentation = false +parserAugmentation.RDFa = false +proxyAugmentation = false +augmentation.reflect = false +augmentation.addDoctype = false +augmentation.reparse = false + +# Content control settings +contentcontrol.enabled = false +contentcontrol.bookmarklist = contentcontrol +contentcontrol.mandatoryfilterlist = yacy +contentcontrol.smwimport.enabled = false +contentcontrol.smwimport.baseurl = +contentcontrol.smwimport.purgelistoninit = true +contentcontrol.smwimport.targetlist = contentcontrol +contentcontrol.smwimport.defaultcategory = yacy + +# Interaction settings +interaction.enabled = false +interaction.target = yacy + +interaction.feedback.enabled = true +interaction.feedback.url = +interaction.feedback.accept = false +interaction.usertracking.enabled = true +interaction.addcontent.enabled = false +interaction.userlogon.enabled = false +interaction.approvelist.enabled = false +interaction.suggestrejected.enabled = false +interaction.overlayinteraction.enabled = false + +interaction.globalmenu.enabled = true +interaction.portalconfigbutton.enabled = true +interaction.crawltraces.enabled = false +interaction.userselfregistration.enabled = false +interaction.forcebookmarkimport = +interaction.visiblecategories = all + +interaction.dontimportbookmarks = + +interaction.autocrawler.enabled = false +interaction.autocrawler.domainfilter = .* +interaction.autocrawler.categoryfilter = .* + +# host browser settings +browser.autoload = false +browser.load4everyone = false diff --git a/htroot/ConfigPortal.html b/htroot/ConfigPortal.html index ce2355111..ad8487564 100644 --- a/htroot/ConfigPortal.html +++ b/htroot/ConfigPortal.html @@ -1,227 +1,228 @@ - - 
- - - YaCy '#[clientname]#': Integration of a Search Portal - #%env/templates/metas.template%# - - - #%env/templates/header.template%# - #%env/templates/submenuSearchConfiguration.template%# -

Integration of a Search Portal

-

- If you like to integrate YaCy as portal for your web pages, you may want to change icons and messages on the search page. - The search page may be customized. You can change the 'corporate identity'-images, the greeting line - and a link to a home page that is reached when the 'corporate identity'-images are clicked. - To change also colours and styles use the Appearance Servlet for different skins and languages. -

-
-
-
-
Greeting Line
-
- -
URL of Home Page
-
- -
URL of a Small Corporate Image
-
- -
URL of a Large Corporate Image
-
- -
Enable Search for Everyone?
-
- Search is available for everyone  - Only the administator is allowed to search -
- -
Show Navigation Bar on Search Page?
-
- Show Navigation Top-Menu  - no link to YaCy Menu (admin must navigate to /Status.html manually) -
- -
Show Advanced Search Options on Search Page?
-
- Show Advanced Search Options on index.html  - do not show Advanced Search -
- -
Show Media Search Options
-
- Text  - Images  - Audio  - Video  - Applications -
- -
Show additional interaction features in footer
-
- User-Logon  -
- -
Snippet Fetch Strategy & Link Verification
-
- ideaSpeed up search results with this option! (use CACHEONLY or FALSE to switch off verification)
- NOCACHE: no use of web cache, load all snippets online
- IFFRESH: use the cache if the cache exists and is fresh otherwise load online
- IFEXIST: use the cache if the cache exist or load online
- If verification fails, delete index reference

- CACHEONLY: never go online, use all content from cache. If no cache entry exist, consider content nevertheless as available and show result without snippet
- FALSE: no link verification and not snippet generation: all search results are valid without verification -
- -
Show Information Links for each Search Result Entry
-
- Date  - Size  - Metadata  - Parser  - Pictures  - Cache - Augmented Browsing - Tags -
- -
Show Navigation on Side-Bar
-
- Host Navigation  - Author Navigation  - Wiki Name-Space Navigation  - Topics (Tag-Cloud) Navigation  -
- -
Default Pop-Up Page
-
- Status Page  - Search Front Page  - Search Page (small header)  - Interactive Search Page  -
- -
Default maximum number of results per page
-
- -
Default index.html Page (by forwarder)
-
- -
Target for Click on Search Results
-
- -
- -
Special Target as Exception for an URL-Pattern
-
- -  Pattern: -
- -
Exclude Hosts
-
List of hosts that shall be excluded from search results by default but can be included using the site:<host> operator:
-
- #[search.excludehosth]# -
- -
'About' Column
(shown in a column alongside
with the search result page)
-
(Headline)
- (Content) -
- -
 
-
-    - -

You have to set a remote user/password to change this options.

-
-
-
-
- -

- The search page can be integrated in your own web pages with an iframe. Simply use the following code: -

-
-      <iframe name="target"
-       src="http://#[myaddress]#/index.html?display=2&resource=local"
-       width="100%"
-       height="410"
-       frameborder="0"
-       scrolling="auto"
-       id="target"> 
-      </iframe>  
-    
- This would look like: - - -

- For a search page with a small header, use this code: -

-
-      <iframe name="target2"
-       src="http://#[myaddress]#/yacysearch.html?display=2&resource=local"
-       width="100%"
-       height="180"
-       frameborder="0"
-       scrolling="auto"
-       id="target2"> 
-      </iframe>  
-    
- This would look like: - - -

- A third option is the interactive search. Use this code: -

-
-      <iframe name="target3"
-       src="http://#[myaddress]#/yacyinteractive.html?display=2"
-       width="100%"
-       height="180"
-       frameborder="0"
-       scrolling="auto"
-       id="target3"> 
-      </iframe>  
-    
- This would look like: - - - #%env/templates/footer.template%# - - + + + + + YaCy '#[clientname]#': Integration of a Search Portal + #%env/templates/metas.template%# + + + #%env/templates/header.template%# + #%env/templates/submenuSearchConfiguration.template%# +

Integration of a Search Portal

+

+ If you would like to integrate YaCy as a portal for your web pages, you may want to change the icons and messages on the search page. + The search page may be customized. You can change the 'corporate identity' images, the greeting line + and a link to a home page that is reached when the 'corporate identity' images are clicked. + To also change colours and styles, use the Appearance Servlet for different skins and languages. +

+
+
+
+
Greeting Line
+
+ +
URL of Home Page
+
+ +
URL of a Small Corporate Image
+
+ +
URL of a Large Corporate Image
+
+ +
Enable Search for Everyone?
+
+ Search is available for everyone  + Only the administrator is allowed to search +
+ +
Show Navigation Bar on Search Page?
+
+ Show Navigation Top-Menu  + no link to YaCy Menu (admin must navigate to /Status.html manually) +
+ +
Show Advanced Search Options on Search Page?
+
+ Show Advanced Search Options on index.html  + do not show Advanced Search +
+ +
Show Media Search Options
+
+ Text  + Images  + Audio  + Video  + Applications +
+ +
Show additional interaction features in footer
+
+ User-Logon  +
+ +
Snippet Fetch Strategy & Link Verification
+
+ Speed up search results with this option! (use CACHEONLY or FALSE to switch off verification)
+ NOCACHE: no use of web cache, load all snippets online
+ IFFRESH: use the cache if the cache exists and is fresh, otherwise load online
+ IFEXIST: use the cache if the cache exists, otherwise load online
+ If verification fails, delete index reference

+ CACHEONLY: never go online, use all content from cache. If no cache entry exists, consider content nevertheless as available and show the result without a snippet
+ FALSE: no link verification and no snippet generation: all search results are valid without verification +
+ +
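For reference, the options above write the search.verify and search.verify.delete properties handled in ConfigPortal.java; the defaults shipped in defaults/yacy.init earlier in this patch are:
search.verify = ifexist
search.verify.delete = true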
Show Information Links for each Search Result Entry
+
+ Date  + Size  + Metadata  + Parser  + Pictures  + Cache + Augmented Browsing + Host Browser  + Tags +
+ +
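These checkboxes correspond to the search.result.show.* properties stored by ConfigPortal.java; with the defaults set in defaults/yacy.init by this patch, the new Host Browser link is enabled out of the box:
search.result.show.cache = true
search.result.show.proxy = false
search.result.show.hostbrowser = true
search.result.show.tags = false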
Show Navigation on Side-Bar
+
+ Host Navigation  + Author Navigation  + Wiki Name-Space Navigation  + Topics (Tag-Cloud) Navigation  +
+ +
Default Pop-Up Page
+
+ Status Page  + Search Front Page  + Search Page (small header)  + Interactive Search Page  +
+ +
Default maximum number of results per page
+
+ +
Default index.html Page (by forwarder)
+
+ +
Target for Click on Search Results
+
+ +
+ +
Special Target as Exception for an URL-Pattern
+
+ +  Pattern: +
+ +
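A hypothetical example of the special-target exception (the pattern value is an illustrative assumption, not a shipped default): results whose URL matches the pattern open in a new window, while all other results keep the default target:
search.target = _self
search.target.special = _blank
search.target.special.pattern = .*\.pdf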
Exclude Hosts
+
List of hosts that shall be excluded from search results by default but can be included using the site:<host> operator:
+
+ #[search.excludehosth]# +
+ +
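A sketch of the exclude-hosts feature described above (the host name is made up for illustration); ConfigPortal.java derives the matching host hashes for search.excludehosth from this list, and an excluded host can still be searched explicitly with the site: operator, e.g. a query like 'yacy site:example.org':
search.excludehosts=example.org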
'About' Column
(shown in a column alongside
with the search result page)
+
(Headline)
+ (Content) +
+ +
 
+
+    + +

You have to set a remote user/password to change these options.

+
+
+
+
+ +

+ The search page can be integrated in your own web pages with an iframe. Simply use the following code: +

+
+      <iframe name="target"
+       src="http://#[myaddress]#/index.html?display=2&resource=local"
+       width="100%"
+       height="410"
+       frameborder="0"
+       scrolling="auto"
+       id="target"> 
+      </iframe>  
+    
+ This would look like: + + +

+ For a search page with a small header, use this code: +

+
+      <iframe name="target2"
+       src="http://#[myaddress]#/yacysearch.html?display=2&resource=local"
+       width="100%"
+       height="180"
+       frameborder="0"
+       scrolling="auto"
+       id="target2"> 
+      </iframe>  
+    
+ This would look like: + + +

+ A third option is the interactive search. Use this code: +

+
+      <iframe name="target3"
+       src="http://#[myaddress]#/yacyinteractive.html?display=2"
+       width="100%"
+       height="180"
+       frameborder="0"
+       scrolling="auto"
+       id="target3"> 
+      </iframe>  
+    
+ This would look like: + + + #%env/templates/footer.template%# + + diff --git a/htroot/ConfigPortal.java b/htroot/ConfigPortal.java index 94f16f50d..02d51fc27 100644 --- a/htroot/ConfigPortal.java +++ b/htroot/ConfigPortal.java @@ -1,241 +1,244 @@ -// ConfigPortal.java -// ----------------------- -// part of YaCy -// (C) by Michael Peter Christen; mc@yacy.net -// first published on http://yacy.net -// Frankfurt, Germany, 4.7.2008 -// -//$LastChangedDate$ -//$LastChangedRevision$ -//$LastChangedBy$ -// -// LICENSE -// -// This program is free software; you can redistribute it and/or modify -// it under the terms of the GNU General Public License as published by -// the Free Software Foundation; either version 2 of the License, or -// (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU General Public License for more details. -// -// You should have received a copy of the GNU General Public License -// along with this program; if not, write to the Free Software -// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -import net.yacy.cora.protocol.RequestHeader; -import net.yacy.data.WorkTables; -import net.yacy.kelondro.data.meta.DigestURI; -import net.yacy.search.Switchboard; -import net.yacy.search.SwitchboardConstants; -import net.yacy.server.serverObjects; -import net.yacy.server.serverSwitch; -import net.yacy.server.http.HTTPDFileHandler; - -public class ConfigPortal { - - public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) { - final serverObjects prop = new serverObjects(); - final Switchboard sb = (Switchboard) env; - - if (post != null) { - // AUTHENTICATE - if (!sb.verifyAuthentication(header)) { - // force log-in - prop.authenticationRequired(); - return prop; - } - - if (post.containsKey("popup")) { - final String popup = post.get("popup", "status"); - if ("front".equals(popup)) { - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "index.html"); - } else if ("search".equals(popup)) { - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "yacysearch.html"); - } else if ("interactive".equals(popup)) { - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "yacyinteractive.html"); - } else { - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "Status.html"); - } - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, sb.getConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "index.html")); - HTTPDFileHandler.initDefaultPath(); - } - if (post.containsKey("searchpage_set")) { - final String newGreeting = post.get(SwitchboardConstants.GREETING, ""); - // store this call as api call - sb.tables.recordAPICall(post, "ConfigPortal.html", WorkTables.TABLE_API_TYPE_CONFIGURATION, "new portal design. 
greeting: " + newGreeting); - - sb.setConfig(SwitchboardConstants.GREETING, newGreeting); - sb.setConfig(SwitchboardConstants.GREETING_HOMEPAGE, post.get(SwitchboardConstants.GREETING_HOMEPAGE, "")); - sb.setConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, post.get(SwitchboardConstants.GREETING_LARGE_IMAGE, "")); - sb.setConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, post.get(SwitchboardConstants.GREETING_SMALL_IMAGE, "")); - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, post.get("target", "_self")); - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, post.get("target_special", "_self")); - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, post.get("target_special_pattern", "_self")); - sb.setConfig(SwitchboardConstants.SEARCH_ITEMS, post.getInt("maximumRecords", 10)); - sb.setConfig(SwitchboardConstants.INDEX_FORWARD, post.get(SwitchboardConstants.INDEX_FORWARD, "")); - HTTPDFileHandler.indexForward = post.get(SwitchboardConstants.INDEX_FORWARD, ""); - sb.setConfig("publicTopmenu", !post.containsKey("publicTopmenu") || post.getBoolean("publicTopmenu")); - sb.setConfig("publicSearchpage", !post.containsKey("publicSearchpage") || post.getBoolean("publicSearchpage")); - sb.setConfig("search.options", post.getBoolean("search.options")); - - sb.setConfig("interaction.userlogon.enabled", post.getBoolean("interaction.userlogon")); - - sb.setConfig("search.text", post.getBoolean("search.text")); - sb.setConfig("search.image", post.getBoolean("search.image")); - sb.setConfig("search.audio", post.getBoolean("search.audio")); - sb.setConfig("search.video", post.getBoolean("search.video")); - sb.setConfig("search.app", post.getBoolean("search.app")); - - sb.setConfig("search.result.show.date", post.getBoolean("search.result.show.date")); - sb.setConfig("search.result.show.size", post.getBoolean("search.result.show.size")); - sb.setConfig("search.result.show.metadata", post.getBoolean("search.result.show.metadata")); - sb.setConfig("search.result.show.parser", post.getBoolean("search.result.show.parser")); - sb.setConfig("search.result.show.pictures", post.getBoolean("search.result.show.pictures")); - sb.setConfig("search.result.show.cache", post.getBoolean("search.result.show.cache")); - sb.setConfig("search.result.show.proxy", post.getBoolean("search.result.show.proxy")); - sb.setConfig("search.result.show.tags", post.getBoolean("search.result.show.tags")); - - sb.setConfig(SwitchboardConstants.SEARCH_VERIFY, post.get("search.verify", "ifexist")); - sb.setConfig(SwitchboardConstants.SEARCH_VERIFY_DELETE, post.getBoolean("search.verify.delete")); - - sb.setConfig("about.headline", post.get("about.headline", "")); - sb.setConfig("about.body", post.get("about.body", "")); - - String excludehosts = post.get("search.excludehosts", ""); - sb.setConfig("search.excludehosts", excludehosts); - sb.setConfig("search.excludehosth", DigestURI.hosthashes(excludehosts)); - - // construct navigation String - String nav = ""; - if (post.getBoolean("search.navigation.hosts")) nav += "hosts,"; - if (post.getBoolean("search.navigation.authors")) nav += "authors,"; - if (post.getBoolean("search.navigation.namespace")) nav += "namespace,"; - if (post.getBoolean("search.navigation.topics")) nav += "topics,"; - if (nav.endsWith(",")) nav = nav.substring(0, nav.length() - 1); sb.setConfig("search.navigation", nav); - } - if (post.containsKey("searchpage_default")) { - sb.setConfig(SwitchboardConstants.GREETING, "P2P Web Search"); - sb.setConfig(SwitchboardConstants.GREETING_HOMEPAGE, 
"http://yacy.net"); - sb.setConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, "/env/grafics/YaCyLogo_120ppi.png"); - sb.setConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, "/env/grafics/YaCyLogo_60ppi.png"); - sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "Status.html"); - sb.setConfig(SwitchboardConstants.INDEX_FORWARD, ""); - HTTPDFileHandler.indexForward = ""; - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, "_self"); - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, "_self"); - sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, ""); - sb.setConfig("publicTopmenu", true); - sb.setConfig("publicSearchpage", true); - sb.setConfig("search.navigation", "hosts,authors,namespace,topics"); - sb.setConfig("search.options", true); - sb.setConfig("interaction.userlogon.enabled", false); - sb.setConfig("search.text", true); - sb.setConfig("search.image", true); - sb.setConfig("search.audio", false); - sb.setConfig("search.video", false); - sb.setConfig("search.app", false); - sb.setConfig("search.result.show.date", true); - sb.setConfig("search.result.show.size", false); - sb.setConfig("search.result.show.metadata", false); - sb.setConfig("search.result.show.parser", false); - sb.setConfig("search.result.show.pictures", false); - sb.setConfig("search.result.show.cache", true); - sb.setConfig("search.result.show.proxy", false); - sb.setConfig("search.result.show.tags", false); - sb.setConfig(SwitchboardConstants.SEARCH_VERIFY, "iffresh"); - sb.setConfig(SwitchboardConstants.SEARCH_VERIFY_DELETE, "true"); - sb.setConfig("about.headline", ""); - sb.setConfig("about.body", ""); - sb.setConfig("search.excludehosts", ""); - sb.setConfig("search.excludehosth", ""); - } - } - - prop.putHTML(SwitchboardConstants.GREETING, sb.getConfig(SwitchboardConstants.GREETING, "")); - prop.putHTML(SwitchboardConstants.GREETING_HOMEPAGE, sb.getConfig(SwitchboardConstants.GREETING_HOMEPAGE, "")); - prop.putHTML(SwitchboardConstants.GREETING_LARGE_IMAGE, sb.getConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, "")); - prop.putHTML(SwitchboardConstants.GREETING_SMALL_IMAGE, sb.getConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, "")); - prop.putHTML(SwitchboardConstants.INDEX_FORWARD, sb.getConfig(SwitchboardConstants.INDEX_FORWARD, "")); - prop.put("publicTopmenu", sb.getConfigBool("publicTopmenu", false) ? 1 : 0); - prop.put("publicSearchpage", sb.getConfigBool("publicSearchpage", false) ? 1 : 0); - prop.put("search.options", sb.getConfigBool("search.options", false) ? 1 : 0); - - prop.put("interaction.userlogon", sb.getConfigBool("interaction.userlogon.enabled", false) ? 1 : 0); - - prop.put("search.text", sb.getConfigBool("search.text", false) ? 1 : 0); - prop.put("search.image", sb.getConfigBool("search.image", false) ? 1 : 0); - prop.put("search.audio", sb.getConfigBool("search.audio", false) ? 1 : 0); - prop.put("search.video", sb.getConfigBool("search.video", false) ? 1 : 0); - prop.put("search.app", sb.getConfigBool("search.app", false) ? 1 : 0); - - prop.put("search.result.show.date", sb.getConfigBool("search.result.show.date", false) ? 1 : 0); - prop.put("search.result.show.size", sb.getConfigBool("search.result.show.size", false) ? 1 : 0); - prop.put("search.result.show.metadata", sb.getConfigBool("search.result.show.metadata", false) ? 1 : 0); - prop.put("search.result.show.parser", sb.getConfigBool("search.result.show.parser", false) ? 1 : 0); - prop.put("search.result.show.pictures", sb.getConfigBool("search.result.show.pictures", false) ? 
1 : 0);
-        prop.put("search.result.show.cache", sb.getConfigBool("search.result.show.cache", false) ? 1 : 0);
-        prop.put("search.result.show.proxy", sb.getConfigBool("search.result.show.proxy", false) ? 1 : 0);
-        prop.put("search.result.show.tags", sb.getConfigBool("search.result.show.tags", false) ? 1 : 0);
-
-        prop.put("search.navigation.hosts", sb.getConfig("search.navigation", "").indexOf("hosts",0) >= 0 ? 1 : 0);
-        prop.put("search.navigation.authors", sb.getConfig("search.navigation", "").indexOf("authors",0) >= 0 ? 1 : 0);
-        prop.put("search.navigation.namespace", sb.getConfig("search.navigation", "").indexOf("namespace",0) >= 0 ? 1 : 0);
-        prop.put("search.navigation.topics", sb.getConfig("search.navigation", "").indexOf("topics",0) >= 0 ? 1 : 0);
-
-        prop.put("search.verify.nocache", sb.getConfig("search.verify", "").equals("nocache") ? 1 : 0);
-        prop.put("search.verify.iffresh", sb.getConfig("search.verify", "").equals("iffresh") ? 1 : 0);
-        prop.put("search.verify.ifexist", sb.getConfig("search.verify", "").equals("ifexist") ? 1 : 0);
-        prop.put("search.verify.cacheonly", sb.getConfig("search.verify", "").equals("cacheonly") ? 1 : 0);
-        prop.put("search.verify.false", sb.getConfig("search.verify", "").equals("false") ? 1 : 0);
-        prop.put("search.verify.delete", sb.getConfigBool(SwitchboardConstants.SEARCH_VERIFY_DELETE, true) ? 1 : 0);
-
-        prop.put("about.headline", sb.getConfig("about.headline", ""));
-        prop.put("about.body", sb.getConfig("about.body", ""));
-
-        prop.put("search.excludehosts", sb.getConfig("search.excludehosts", ""));
-        prop.put("search.excludehosth", sb.getConfig("search.excludehosth", ""));
-
-        final String browserPopUpPage = sb.getConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "ConfigBasic.html");
-        prop.put("popupFront", 0);
-        prop.put("popupSearch", 0);
-        prop.put("popupInteractive", 0);
-        prop.put("popupStatus", 0);
-        if (browserPopUpPage.startsWith("index")) {
-            prop.put("popupFront", 1);
-        } else if (browserPopUpPage.startsWith("yacysearch")) {
-            prop.put("popupSearch", 1);
-        } else if (browserPopUpPage.startsWith("yacyinteractive")) {
-            prop.put("popupInteractive", 1);
-        } else {
-            prop.put("popupStatus", 1);
-        }
-
-        prop.put("maximumRecords", sb.getConfigInt(SwitchboardConstants.SEARCH_ITEMS, 10));
-
-        final String target = sb.getConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, "_self");
-        prop.put("target_selected_blank", "_blank".equals(target) ? 1 : 0);
-        prop.put("target_selected_self", "_self".equals(target) ? 1 : 0);
-        prop.put("target_selected_parent", "_parent".equals(target) ? 1 : 0);
-        prop.put("target_selected_top", "_top".equals(target) ? 1 : 0);
-        prop.put("target_selected_searchresult", "searchresult".equals(target) ? 1 : 0);
-
-        final String target_special = sb.getConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, "_self");
-        prop.put("target_selected_special_blank", "_blank".equals(target_special) ? 1 : 0);
-        prop.put("target_selected_special_self", "_self".equals(target_special) ? 1 : 0);
-        prop.put("target_selected_special_parent", "_parent".equals(target_special) ? 1 : 0);
-        prop.put("target_selected_special_top", "_top".equals(target_special) ? 1 : 0);
-        prop.put("target_selected_special_searchresult", "searchresult".equals(target_special) ? 1 : 0);
-        prop.put("target_special_pattern", sb.getConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, ""));
-
-        String myaddress = (sb.peers == null) ? null : sb.peers.mySeed() == null ? null : sb.peers.mySeed().getPublicAddress();
-        if (myaddress == null) {
-            myaddress = "localhost:" + sb.getConfig("port", "8090");
-        }
-        prop.put("myaddress", myaddress);
-        return prop;
-    }
-
-}
+// ConfigPortal.java
+// -----------------------
+// part of YaCy
+// (C) by Michael Peter Christen; mc@yacy.net
+// first published on http://yacy.net
+// Frankfurt, Germany, 4.7.2008
+//
+//$LastChangedDate$
+//$LastChangedRevision$
+//$LastChangedBy$
+//
+// LICENSE
+//
+// This program is free software; you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation; either version 2 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU General Public License for more details.
+//
+// You should have received a copy of the GNU General Public License
+// along with this program; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+import net.yacy.cora.protocol.RequestHeader;
+import net.yacy.data.WorkTables;
+import net.yacy.kelondro.data.meta.DigestURI;
+import net.yacy.search.Switchboard;
+import net.yacy.search.SwitchboardConstants;
+import net.yacy.server.serverObjects;
+import net.yacy.server.serverSwitch;
+import net.yacy.server.http.HTTPDFileHandler;
+
+public class ConfigPortal {
+
+    public static serverObjects respond(final RequestHeader header, final serverObjects post, final serverSwitch env) {
+        final serverObjects prop = new serverObjects();
+        final Switchboard sb = (Switchboard) env;
+
+        if (post != null) {
+            // AUTHENTICATE
+            if (!sb.verifyAuthentication(header)) {
+                // force log-in
+                prop.authenticationRequired();
+                return prop;
+            }
+
+            if (post.containsKey("popup")) {
+                final String popup = post.get("popup", "status");
+                if ("front".equals(popup)) {
+                    sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "index.html");
+                } else if ("search".equals(popup)) {
+                    sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "yacysearch.html");
+                } else if ("interactive".equals(popup)) {
+                    sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "yacyinteractive.html");
+                } else {
+                    sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "Status.html");
+                }
+                sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, sb.getConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "index.html"));
+                HTTPDFileHandler.initDefaultPath();
+            }
+            if (post.containsKey("searchpage_set")) {
+                final String newGreeting = post.get(SwitchboardConstants.GREETING, "");
+                // store this call as api call
+                sb.tables.recordAPICall(post, "ConfigPortal.html", WorkTables.TABLE_API_TYPE_CONFIGURATION, "new portal design. greeting: " + newGreeting);
+
+                sb.setConfig(SwitchboardConstants.GREETING, newGreeting);
+                sb.setConfig(SwitchboardConstants.GREETING_HOMEPAGE, post.get(SwitchboardConstants.GREETING_HOMEPAGE, ""));
+                sb.setConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, post.get(SwitchboardConstants.GREETING_LARGE_IMAGE, ""));
+                sb.setConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, post.get(SwitchboardConstants.GREETING_SMALL_IMAGE, ""));
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, post.get("target", "_self"));
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, post.get("target_special", "_self"));
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, post.get("target_special_pattern", "_self"));
+                sb.setConfig(SwitchboardConstants.SEARCH_ITEMS, post.getInt("maximumRecords", 10));
+                sb.setConfig(SwitchboardConstants.INDEX_FORWARD, post.get(SwitchboardConstants.INDEX_FORWARD, ""));
+                HTTPDFileHandler.indexForward = post.get(SwitchboardConstants.INDEX_FORWARD, "");
+                sb.setConfig("publicTopmenu", !post.containsKey("publicTopmenu") || post.getBoolean("publicTopmenu"));
+                sb.setConfig("publicSearchpage", !post.containsKey("publicSearchpage") || post.getBoolean("publicSearchpage"));
+                sb.setConfig("search.options", post.getBoolean("search.options"));
+
+                sb.setConfig("interaction.userlogon.enabled", post.getBoolean("interaction.userlogon"));
+
+                sb.setConfig("search.text", post.getBoolean("search.text"));
+                sb.setConfig("search.image", post.getBoolean("search.image"));
+                sb.setConfig("search.audio", post.getBoolean("search.audio"));
+                sb.setConfig("search.video", post.getBoolean("search.video"));
+                sb.setConfig("search.app", post.getBoolean("search.app"));
+
+                sb.setConfig("search.result.show.date", post.getBoolean("search.result.show.date"));
+                sb.setConfig("search.result.show.size", post.getBoolean("search.result.show.size"));
+                sb.setConfig("search.result.show.metadata", post.getBoolean("search.result.show.metadata"));
+                sb.setConfig("search.result.show.parser", post.getBoolean("search.result.show.parser"));
+                sb.setConfig("search.result.show.pictures", post.getBoolean("search.result.show.pictures"));
+                sb.setConfig("search.result.show.cache", post.getBoolean("search.result.show.cache"));
+                sb.setConfig("search.result.show.proxy", post.getBoolean("search.result.show.proxy"));
+                sb.setConfig("search.result.show.hostbrowser", post.getBoolean("search.result.show.hostbrowser"));
+                sb.setConfig("search.result.show.tags", post.getBoolean("search.result.show.tags"));
+
+                sb.setConfig(SwitchboardConstants.SEARCH_VERIFY, post.get("search.verify", "ifexist"));
+                sb.setConfig(SwitchboardConstants.SEARCH_VERIFY_DELETE, post.getBoolean("search.verify.delete"));
+
+                sb.setConfig("about.headline", post.get("about.headline", ""));
+                sb.setConfig("about.body", post.get("about.body", ""));
+
+                String excludehosts = post.get("search.excludehosts", "");
+                sb.setConfig("search.excludehosts", excludehosts);
+                sb.setConfig("search.excludehosth", DigestURI.hosthashes(excludehosts));
+
+                // construct navigation String
+                String nav = "";
+                if (post.getBoolean("search.navigation.hosts")) nav += "hosts,";
+                if (post.getBoolean("search.navigation.authors")) nav += "authors,";
+                if (post.getBoolean("search.navigation.namespace")) nav += "namespace,";
+                if (post.getBoolean("search.navigation.topics")) nav += "topics,";
+                if (nav.endsWith(",")) nav = nav.substring(0, nav.length() - 1);
+                sb.setConfig("search.navigation", nav);
+            }
+            if (post.containsKey("searchpage_default")) {
+                sb.setConfig(SwitchboardConstants.GREETING, "P2P Web Search");
+                sb.setConfig(SwitchboardConstants.GREETING_HOMEPAGE, "http://yacy.net");
+                sb.setConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, "/env/grafics/YaCyLogo_120ppi.png");
+                sb.setConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, "/env/grafics/YaCyLogo_60ppi.png");
+                sb.setConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "Status.html");
+                sb.setConfig(SwitchboardConstants.INDEX_FORWARD, "");
+                HTTPDFileHandler.indexForward = "";
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, "_self");
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, "_self");
+                sb.setConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, "");
+                sb.setConfig("publicTopmenu", true);
+                sb.setConfig("publicSearchpage", true);
+                sb.setConfig("search.navigation", "hosts,authors,namespace,topics");
+                sb.setConfig("search.options", true);
+                sb.setConfig("interaction.userlogon.enabled", false);
+                sb.setConfig("search.text", true);
+                sb.setConfig("search.image", true);
+                sb.setConfig("search.audio", false);
+                sb.setConfig("search.video", false);
+                sb.setConfig("search.app", false);
+                sb.setConfig("search.result.show.date", true);
+                sb.setConfig("search.result.show.size", false);
+                sb.setConfig("search.result.show.metadata", false);
+                sb.setConfig("search.result.show.parser", false);
+                sb.setConfig("search.result.show.pictures", false);
+                sb.setConfig("search.result.show.cache", true);
+                sb.setConfig("search.result.show.proxy", false);
+                sb.setConfig("search.result.show.hostbrowser", true);
+                sb.setConfig("search.result.show.tags", false);
+                sb.setConfig(SwitchboardConstants.SEARCH_VERIFY, "iffresh");
+                sb.setConfig(SwitchboardConstants.SEARCH_VERIFY_DELETE, "true");
+                sb.setConfig("about.headline", "");
+                sb.setConfig("about.body", "");
+                sb.setConfig("search.excludehosts", "");
+                sb.setConfig("search.excludehosth", "");
+            }
+        }
+
+        prop.putHTML(SwitchboardConstants.GREETING, sb.getConfig(SwitchboardConstants.GREETING, ""));
+        prop.putHTML(SwitchboardConstants.GREETING_HOMEPAGE, sb.getConfig(SwitchboardConstants.GREETING_HOMEPAGE, ""));
+        prop.putHTML(SwitchboardConstants.GREETING_LARGE_IMAGE, sb.getConfig(SwitchboardConstants.GREETING_LARGE_IMAGE, ""));
+        prop.putHTML(SwitchboardConstants.GREETING_SMALL_IMAGE, sb.getConfig(SwitchboardConstants.GREETING_SMALL_IMAGE, ""));
+        prop.putHTML(SwitchboardConstants.INDEX_FORWARD, sb.getConfig(SwitchboardConstants.INDEX_FORWARD, ""));
+        prop.put("publicTopmenu", sb.getConfigBool("publicTopmenu", false) ? 1 : 0);
+        prop.put("publicSearchpage", sb.getConfigBool("publicSearchpage", false) ? 1 : 0);
+        prop.put("search.options", sb.getConfigBool("search.options", false) ? 1 : 0);
+
+        prop.put("interaction.userlogon", sb.getConfigBool("interaction.userlogon.enabled", false) ? 1 : 0);
+
+        prop.put("search.text", sb.getConfigBool("search.text", false) ? 1 : 0);
+        prop.put("search.image", sb.getConfigBool("search.image", false) ? 1 : 0);
+        prop.put("search.audio", sb.getConfigBool("search.audio", false) ? 1 : 0);
+        prop.put("search.video", sb.getConfigBool("search.video", false) ? 1 : 0);
+        prop.put("search.app", sb.getConfigBool("search.app", false) ? 1 : 0);
+
+        prop.put("search.result.show.date", sb.getConfigBool("search.result.show.date", false) ? 1 : 0);
+        prop.put("search.result.show.size", sb.getConfigBool("search.result.show.size", false) ? 1 : 0);
+        prop.put("search.result.show.metadata", sb.getConfigBool("search.result.show.metadata", false) ? 1 : 0);
+        prop.put("search.result.show.parser", sb.getConfigBool("search.result.show.parser", false) ? 1 : 0);
+        prop.put("search.result.show.pictures", sb.getConfigBool("search.result.show.pictures", false) ? 1 : 0);
+        prop.put("search.result.show.cache", sb.getConfigBool("search.result.show.cache", false) ? 1 : 0);
+        prop.put("search.result.show.proxy", sb.getConfigBool("search.result.show.proxy", false) ? 1 : 0);
+        prop.put("search.result.show.hostbrowser", sb.getConfigBool("search.result.show.hostbrowser", false) ? 1 : 0);
+        prop.put("search.result.show.tags", sb.getConfigBool("search.result.show.tags", false) ? 1 : 0);
+
+        prop.put("search.navigation.hosts", sb.getConfig("search.navigation", "").indexOf("hosts",0) >= 0 ? 1 : 0);
+        prop.put("search.navigation.authors", sb.getConfig("search.navigation", "").indexOf("authors",0) >= 0 ? 1 : 0);
+        prop.put("search.navigation.namespace", sb.getConfig("search.navigation", "").indexOf("namespace",0) >= 0 ? 1 : 0);
+        prop.put("search.navigation.topics", sb.getConfig("search.navigation", "").indexOf("topics",0) >= 0 ? 1 : 0);
+
+        prop.put("search.verify.nocache", sb.getConfig("search.verify", "").equals("nocache") ? 1 : 0);
+        prop.put("search.verify.iffresh", sb.getConfig("search.verify", "").equals("iffresh") ? 1 : 0);
+        prop.put("search.verify.ifexist", sb.getConfig("search.verify", "").equals("ifexist") ? 1 : 0);
+        prop.put("search.verify.cacheonly", sb.getConfig("search.verify", "").equals("cacheonly") ? 1 : 0);
+        prop.put("search.verify.false", sb.getConfig("search.verify", "").equals("false") ? 1 : 0);
+        prop.put("search.verify.delete", sb.getConfigBool(SwitchboardConstants.SEARCH_VERIFY_DELETE, true) ? 1 : 0);
+
+        prop.put("about.headline", sb.getConfig("about.headline", ""));
+        prop.put("about.body", sb.getConfig("about.body", ""));
+
+        prop.put("search.excludehosts", sb.getConfig("search.excludehosts", ""));
+        prop.put("search.excludehosth", sb.getConfig("search.excludehosth", ""));
+
+        final String browserPopUpPage = sb.getConfig(SwitchboardConstants.BROWSER_POP_UP_PAGE, "ConfigBasic.html");
+        prop.put("popupFront", 0);
+        prop.put("popupSearch", 0);
+        prop.put("popupInteractive", 0);
+        prop.put("popupStatus", 0);
+        if (browserPopUpPage.startsWith("index")) {
+            prop.put("popupFront", 1);
+        } else if (browserPopUpPage.startsWith("yacysearch")) {
+            prop.put("popupSearch", 1);
+        } else if (browserPopUpPage.startsWith("yacyinteractive")) {
+            prop.put("popupInteractive", 1);
+        } else {
+            prop.put("popupStatus", 1);
+        }
+
+        prop.put("maximumRecords", sb.getConfigInt(SwitchboardConstants.SEARCH_ITEMS, 10));
+
+        final String target = sb.getConfig(SwitchboardConstants.SEARCH_TARGET_DEFAULT, "_self");
+        prop.put("target_selected_blank", "_blank".equals(target) ? 1 : 0);
+        prop.put("target_selected_self", "_self".equals(target) ? 1 : 0);
+        prop.put("target_selected_parent", "_parent".equals(target) ? 1 : 0);
+        prop.put("target_selected_top", "_top".equals(target) ? 1 : 0);
+        prop.put("target_selected_searchresult", "searchresult".equals(target) ? 1 : 0);
+
+        final String target_special = sb.getConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL, "_self");
+        prop.put("target_selected_special_blank", "_blank".equals(target_special) ? 1 : 0);
+        prop.put("target_selected_special_self", "_self".equals(target_special) ? 1 : 0);
+        prop.put("target_selected_special_parent", "_parent".equals(target_special) ? 1 : 0);
+        prop.put("target_selected_special_top", "_top".equals(target_special) ? 1 : 0);
+        prop.put("target_selected_special_searchresult", "searchresult".equals(target_special) ? 1 : 0);
+        prop.put("target_special_pattern", sb.getConfig(SwitchboardConstants.SEARCH_TARGET_SPECIAL_PATTERN, ""));
+
+        String myaddress = (sb.peers == null) ? null : sb.peers.mySeed() == null ? null : sb.peers.mySeed().getPublicAddress();
+        if (myaddress == null) {
+            myaddress = "localhost:" + sb.getConfig("port", "8090");
+        }
+        prop.put("myaddress", myaddress);
+        return prop;
+    }
+
+}
diff --git a/htroot/HostBrowser.html b/htroot/HostBrowser.html
index df2eb4b58..391446f21 100644
--- a/htroot/HostBrowser.html
+++ b/htroot/HostBrowser.html
@@ -156,9 +156,11 @@ function updatepage(str) {
 #[count]# URLs
 #{/list}#
+    #(admin)#::
 
 
+    #(/admin)#
 #(/outbound)#
@@ -183,4 +185,4 @@ function updatepage(str) {
 #%env/templates/footer.template%#
-
\ No newline at end of file
+
diff --git a/htroot/HostBrowser.java b/htroot/HostBrowser.java
index 927124ead..a68a7dc52 100644
--- a/htroot/HostBrowser.java
+++ b/htroot/HostBrowser.java
@@ -226,6 +226,7 @@ public class HostBrowser {
                 DigestURI uri = new DigestURI(path);
                 String host = uri.getHost();
                 prop.putHTML("outbound_host", host);
+                if (admin) prop.putHTML("outbound_admin_host", host); //used for WebStructurePicture_p link
                 prop.putHTML("inbound_host", host);
                 String hosthash = ASCII.String(uri.hash(), 6, 6);
                 String[] pathparts = uri.getPaths();
diff --git a/htroot/yacysearchitem.html b/htroot/yacysearchitem.html
index 383ebe203..906e3c95a 100644
--- a/htroot/yacysearchitem.html
+++ b/htroot/yacysearchitem.html
@@ -31,8 +31,8 @@
 #(showPictures)#:: | Pictures#(/showPictures)#
 #(showCache)#:: | Cache#(/showCache)#
 #(showProxy)#:: | Augmented Browsing#(/showProxy)#
+#(showHostBrowser)#:: | #(/showHostBrowser)#
 #(showTags)#::
 #(/showTags)#
- | 
 
 ::
diff --git a/htroot/yacysearchitem.java b/htroot/yacysearchitem.java
index 9d8469f4b..130b50421 100644
--- a/htroot/yacysearchitem.java
+++ b/htroot/yacysearchitem.java
@@ -131,6 +131,7 @@ public class yacysearchitem {
         prop.put("content_showPictures", sb.getConfigBool("search.result.show.pictures", true) ? 1 : 0);
         prop.put("content_showCache", sb.getConfigBool("search.result.show.cache", true) ? 1 : 0);
         prop.put("content_showProxy", sb.getConfigBool("search.result.show.proxy", true) ? 1 : 0);
+        prop.put("content_showHostBrowser", sb.getConfigBool("search.result.show.hostbrowser", true) ? 1 : 0);
         prop.put("content_showTags", sb.getConfigBool("search.result.show.tags", false) ? 1 : 0);
         prop.put("content_authorized", authenticated ? "1" : "0");
         final String urlhash = ASCII.String(result.hash());
@@ -190,6 +191,7 @@ public class yacysearchitem {
         prop.put("content_showMetadata_urlhash", resulthashString);
         prop.put("content_showCache_link", resultUrlstring);
         prop.put("content_showProxy_link", resultUrlstring);
+        prop.put("content_showHostBrowser_link", resultUrlstring);
         prop.put("content_showParser_urlhash", resulthashString);
         prop.put("content_showTags_urlhash", resulthashString);
         prop.put("content_urlhexhash", Seed.b64Hash2hexHash(resulthashString));