merge rc1/master

pull/1/head
reger 12 years ago
commit a44eede8b8

@@ -3,6 +3,8 @@
<classpathentry kind="src" path="source"/> <classpathentry kind="src" path="source"/>
<classpathentry excluding="api/|env/|processing/domaingraph/applet/|yacy/|api/bookmarks/|api/ymarks/|api/bookmarks/posts/|api/bookmarks/tags/|api/bookmarks/xbel/|solr/|gsa/|solr/collection1/" kind="src" path="htroot"/> <classpathentry excluding="api/|env/|processing/domaingraph/applet/|yacy/|api/bookmarks/|api/ymarks/|api/bookmarks/posts/|api/bookmarks/tags/|api/bookmarks/xbel/|solr/|gsa/|solr/collection1/" kind="src" path="htroot"/>
<classpathentry excluding="bookmarks/|ymarks/|bookmarks/posts/|bookmarks/tags/|bookmarks/xbel/" kind="src" path="htroot/api"/> <classpathentry excluding="bookmarks/|ymarks/|bookmarks/posts/|bookmarks/tags/|bookmarks/xbel/" kind="src" path="htroot/api"/>
<classpathentry kind="src" path="htroot/env"/>
<classpathentry kind="src" path="htroot/yacy"/>
<classpathentry excluding="posts/|tags/|xbel/" kind="src" path="htroot/api/bookmarks"/> <classpathentry excluding="posts/|tags/|xbel/" kind="src" path="htroot/api/bookmarks"/>
<classpathentry kind="src" path="htroot/api/ymarks"/> <classpathentry kind="src" path="htroot/api/ymarks"/>
<classpathentry kind="src" path="htroot/api/bookmarks/posts"/> <classpathentry kind="src" path="htroot/api/bookmarks/posts"/>
@@ -11,15 +13,9 @@
<classpathentry excluding="collection1/" kind="src" path="htroot/solr"/> <classpathentry excluding="collection1/" kind="src" path="htroot/solr"/>
<classpathentry kind="src" path="htroot/gsa"/> <classpathentry kind="src" path="htroot/gsa"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/> <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="lib" path="lib/jetty-continuation-7.3.0.v20110203.jar"/> <classpathentry kind="lib" path="lib/commons-logging-1.1.3.jar"/>
<classpathentry kind="lib" path="lib/jetty-http-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-io-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-security-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-server-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-servlet-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-servlets-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/jetty-util-7.3.0.v20110203.jar"/>
<classpathentry kind="lib" path="lib/J7Zip-modified.jar"/> <classpathentry kind="lib" path="lib/J7Zip-modified.jar"/>
<classpathentry kind="lib" path="lib/webcat-0.1-swf.jar"/>
<classpathentry kind="lib" path="lib/activation.jar"/> <classpathentry kind="lib" path="lib/activation.jar"/>
<classpathentry kind="lib" path="lib/commons-jxpath-1.3.jar"/> <classpathentry kind="lib" path="lib/commons-jxpath-1.3.jar"/>
<classpathentry kind="lib" path="lib/poi-3.6-20091214.jar"/> <classpathentry kind="lib" path="lib/poi-3.6-20091214.jar"/>
@@ -49,32 +45,32 @@
<classpathentry kind="lib" path="lib/wstx-asl-3.2.9.jar"/> <classpathentry kind="lib" path="lib/wstx-asl-3.2.9.jar"/>
<classpathentry kind="lib" path="lib/slf4j-jdk14-1.7.2.jar"/> <classpathentry kind="lib" path="lib/slf4j-jdk14-1.7.2.jar"/>
<classpathentry kind="lib" path="lib/log4j-over-slf4j-1.7.2.jar"/> <classpathentry kind="lib" path="lib/log4j-over-slf4j-1.7.2.jar"/>
<classpathentry kind="lib" path="lib/httpclient-4.2.5.jar"/> <classpathentry kind="lib" path="lib/httpclient-4.3.jar"/>
<classpathentry kind="lib" path="lib/httpcore-4.2.4.jar"/> <classpathentry kind="lib" path="lib/httpcore-4.3.jar"/>
<classpathentry kind="lib" path="lib/httpmime-4.2.5.jar"/> <classpathentry kind="lib" path="lib/httpmime-4.3.jar"/>
<classpathentry kind="lib" path="lib/noggit-0.5.jar"/> <classpathentry kind="lib" path="lib/noggit-0.5.jar"/>
<classpathentry kind="lib" path="lib/metadata-extractor-2.6.2.jar"/> <classpathentry kind="lib" path="lib/metadata-extractor-2.6.2.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-common-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-phonetic-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-classification-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-codecs-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-core-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-facet-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-grouping-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-highlighter-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-join-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-memory-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-misc-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-queries-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-queryparser-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-spatial-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-suggest-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/solr-core-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/solr-solrj-4.4.0.jar"/>
<classpathentry kind="lib" path="lib/jcifs-1.3.17.jar"/> <classpathentry kind="lib" path="lib/jcifs-1.3.17.jar"/>
<classpathentry kind="lib" path="lib/fontbox-1.8.2.jar"/> <classpathentry kind="lib" path="lib/fontbox-1.8.2.jar"/>
<classpathentry kind="lib" path="lib/jempbox-1.8.2.jar"/> <classpathentry kind="lib" path="lib/jempbox-1.8.2.jar"/>
<classpathentry kind="lib" path="lib/pdfbox-1.8.2.jar"/> <classpathentry kind="lib" path="lib/pdfbox-1.8.2.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-common-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-analyzers-phonetic-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-classification-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-codecs-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-core-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-facet-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-grouping-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-highlighter-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-join-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-memory-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-misc-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-queries-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-queryparser-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-spatial-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/lucene-suggest-4.5.0.jar"/>
<classpathentry kind="lib" path="lib/solr-core-4.5.0.jar" sourcepath="/Volumes/Tres/noBackup/Downloads/solr-4.5.0/solr/core/src/java"/>
<classpathentry kind="lib" path="lib/solr-solrj-4.5.0.jar"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/> <classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="lib" path="lib/icu4j-core.jar"/> <classpathentry kind="lib" path="lib/icu4j-core.jar"/>
<classpathentry kind="lib" path="lib/htmllexer.jar"/> <classpathentry kind="lib" path="lib/htmllexer.jar"/>
@@ -84,11 +80,4 @@
<classpathentry kind="lib" path="lib/iri-0.8.jar"/> <classpathentry kind="lib" path="lib/iri-0.8.jar"/>
<classpathentry kind="lib" path="lib/jsoup-1.6.3.jar"/> <classpathentry kind="lib" path="lib/jsoup-1.6.3.jar"/>
<classpathentry kind="output" path="gen"/> <classpathentry kind="output" path="gen"/>
<classpathentry kind="src" path="htroot/api/bookmarks/posts"/>
<classpathentry kind="src" path="htroot/api/bookmarks/tags"/>
<classpathentry kind="src" path="htroot/api/bookmarks/xbel"/>
<classpathentry kind="src" path="htroot/api/ymarks"/>
<classpathentry kind="src" path="htroot/env"/>
<classpathentry kind="src" path="htroot/yacy"/>
<classpathentry kind="src" path="source"/>
</classpath> </classpath>

@@ -76,21 +76,21 @@
<string>$JAVAROOT/lib/jsch-0.1.42.jar</string> <string>$JAVAROOT/lib/jsch-0.1.42.jar</string>
<string>$JAVAROOT/lib/json-simple-1.1.jar</string> <string>$JAVAROOT/lib/json-simple-1.1.jar</string>
<string>$JAVAROOT/lib/log4j-over-slf4j-1.7.2.jar</string> <string>$JAVAROOT/lib/log4j-over-slf4j-1.7.2.jar</string>
<string>$JAVAROOT/lib/lucene-analyzers-common-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-analyzers-common-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-analyzers-phonetic-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-analyzers-phonetic-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-classification-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-classification-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-codecs-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-codecs-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-core-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-core-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-facet-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-facet-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-grouping-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-grouping-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-highlighter-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-highlighter-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-join-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-join-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-memory-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-memory-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-misc-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-misc-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-queries-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-queries-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-queryparser-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-queryparser-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-spatial-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-spatial-4.5.0.jar</string>
<string>$JAVAROOT/lib/lucene-suggest-4.4.0.jar</string> <string>$JAVAROOT/lib/lucene-suggest-4.5.0.jar</string>
<string>$JAVAROOT/lib/metadata-extractor-2.6.2.jar</string> <string>$JAVAROOT/lib/metadata-extractor-2.6.2.jar</string>
<string>$JAVAROOT/lib/mysql-connector-java-5.1.12-bin.jar</string> <string>$JAVAROOT/lib/mysql-connector-java-5.1.12-bin.jar</string>
<string>$JAVAROOT/lib/noggit-0.5.jar</string> <string>$JAVAROOT/lib/noggit-0.5.jar</string>
@@ -100,8 +100,8 @@
<string>$JAVAROOT/lib/sax-2.0.1.jar</string> <string>$JAVAROOT/lib/sax-2.0.1.jar</string>
<string>$JAVAROOT/lib/slf4j-api-1.7.2.jar</string> <string>$JAVAROOT/lib/slf4j-api-1.7.2.jar</string>
<string>$JAVAROOT/lib/slf4j-jdk14-1.7.2.jar</string> <string>$JAVAROOT/lib/slf4j-jdk14-1.7.2.jar</string>
<string>$JAVAROOT/lib/solr-core-4.4.0.jar</string> <string>$JAVAROOT/lib/solr-core-4.5.0.jar</string>
<string>$JAVAROOT/lib/solr-solrj-4.4.0.jar</string> <string>$JAVAROOT/lib/solr-solrj-4.5.0.jar</string>
<string>$JAVAROOT/lib/spatial4j-0.3.jar</string> <string>$JAVAROOT/lib/spatial4j-0.3.jar</string>
<string>$JAVAROOT/lib/webcat-0.1-swf.jar</string> <string>$JAVAROOT/lib/webcat-0.1-swf.jar</string>
<string>$JAVAROOT/lib/wstx-asl-3.2.9.jar</string> <string>$JAVAROOT/lib/wstx-asl-3.2.9.jar</string>

@@ -0,0 +1,504 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

File diff suppressed because it is too large Load Diff

@ -203,21 +203,21 @@
<pathelement location="${lib}/json-simple-1.1.jar" /> <pathelement location="${lib}/json-simple-1.1.jar" />
<pathelement location="${lib}/jsoup-1.6.3.jar" /> <pathelement location="${lib}/jsoup-1.6.3.jar" />
<pathelement location="${lib}/log4j-over-slf4j-1.7.2.jar" /> <pathelement location="${lib}/log4j-over-slf4j-1.7.2.jar" />
<pathelement location="${lib}/lucene-analyzers-common-4.4.0.jar" /> <pathelement location="${lib}/lucene-analyzers-common-4.5.0.jar" />
<pathelement location="${lib}/lucene-analyzers-phonetic-4.4.0.jar" /> <pathelement location="${lib}/lucene-analyzers-phonetic-4.5.0.jar" />
<pathelement location="${lib}/lucene-classification-4.4.0.jar" /> <pathelement location="${lib}/lucene-classification-4.5.0.jar" />
<pathelement location="${lib}/lucene-codecs-4.4.0.jar" /> <pathelement location="${lib}/lucene-codecs-4.5.0.jar" />
<pathelement location="${lib}/lucene-core-4.4.0.jar" /> <pathelement location="${lib}/lucene-core-4.5.0.jar" />
<pathelement location="${lib}/lucene-facet-4.4.0.jar" /> <pathelement location="${lib}/lucene-facet-4.5.0.jar" />
<pathelement location="${lib}/lucene-grouping-4.4.0.jar" /> <pathelement location="${lib}/lucene-grouping-4.5.0.jar" />
<pathelement location="${lib}/lucene-highlighter-4.4.0.jar" /> <pathelement location="${lib}/lucene-highlighter-4.5.0.jar" />
<pathelement location="${lib}/lucene-join-4.4.0.jar" /> <pathelement location="${lib}/lucene-join-4.5.0.jar" />
<pathelement location="${lib}/lucene-memory-4.4.0.jar" /> <pathelement location="${lib}/lucene-memory-4.5.0.jar" />
<pathelement location="${lib}/lucene-misc-4.4.0.jar" /> <pathelement location="${lib}/lucene-misc-4.5.0.jar" />
<pathelement location="${lib}/lucene-queries-4.4.0.jar" /> <pathelement location="${lib}/lucene-queries-4.5.0.jar" />
<pathelement location="${lib}/lucene-queryparser-4.4.0.jar" /> <pathelement location="${lib}/lucene-queryparser-4.5.0.jar" />
<pathelement location="${lib}/lucene-spatial-4.4.0.jar" /> <pathelement location="${lib}/lucene-spatial-4.5.0.jar" />
<pathelement location="${lib}/lucene-suggest-4.4.0.jar" /> <pathelement location="${lib}/lucene-suggest-4.5.0.jar" />
<pathelement location="${lib}/metadata-extractor-2.6.2.jar" /> <pathelement location="${lib}/metadata-extractor-2.6.2.jar" />
<pathelement location="${lib}/mysql-connector-java-5.1.12-bin.jar" /> <pathelement location="${lib}/mysql-connector-java-5.1.12-bin.jar" />
<pathelement location="${lib}/noggit-0.5.jar" /> <pathelement location="${lib}/noggit-0.5.jar" />
@ -227,8 +227,8 @@
<pathelement location="${lib}/sax-2.0.1.jar" /> <pathelement location="${lib}/sax-2.0.1.jar" />
<pathelement location="${lib}/slf4j-api-1.7.2.jar" /> <pathelement location="${lib}/slf4j-api-1.7.2.jar" />
<pathelement location="${lib}/slf4j-jdk14-1.7.2.jar" /> <pathelement location="${lib}/slf4j-jdk14-1.7.2.jar" />
<pathelement location="${lib}/solr-core-4.4.0.jar" /> <pathelement location="${lib}/solr-core-4.5.0.jar" />
<pathelement location="${lib}/solr-solrj-4.4.0.jar" /> <pathelement location="${lib}/solr-solrj-4.5.0.jar" />
<pathelement location="${lib}/spatial4j-0.3.jar" /> <pathelement location="${lib}/spatial4j-0.3.jar" />
<pathelement location="${lib}/webcat-0.1-swf.jar" /> <pathelement location="${lib}/webcat-0.1-swf.jar" />
<pathelement location="${lib}/wstx-asl-3.2.9.jar" /> <pathelement location="${lib}/wstx-asl-3.2.9.jar" />

@ -242,12 +242,18 @@ inboundlinks_protocol_sxt
## internal links, the url only without the protocol ## internal links, the url only without the protocol
inboundlinks_urlstub_sxt inboundlinks_urlstub_sxt
## internal links, the visible anchor text
inboundlinks_anchortext_txt
## external links, only the protocol ## external links, only the protocol
outboundlinks_protocol_sxt outboundlinks_protocol_sxt
## external links, the url only without the protocol ## external links, the url only without the protocol
outboundlinks_urlstub_sxt outboundlinks_urlstub_sxt
## external links, the visible anchor text
outboundlinks_anchortext_txt
## all text/words appearing in image alt texts or the tokenized url ## all text/words appearing in image alt texts or the tokenized url
images_text_t images_text_t
@ -353,6 +359,9 @@ url_protocol_s
## the file name (which is the string after the last '/' and before the query part from '?' on) without the file extension ## the file name (which is the string after the last '/' and before the query part from '?' on) without the file extension
url_file_name_s url_file_name_s
## tokens generated from url_file_name_s which can be used for better matching and result boosting
url_file_name_tokens_t
## the file name extension ## the file name extension
url_file_ext_s url_file_ext_s
@ -435,7 +444,9 @@ cr_host_chance_d
## normalization of chance: 0 for lower halve of cr_host_count_i urls, 1 for 1/2 of the remaining and so on. the maximum number is 10 ## normalization of chance: 0 for lower halve of cr_host_count_i urls, 1 for 1/2 of the remaining and so on. the maximum number is 10
cr_host_norm_i cr_host_norm_i
## custom rating; to be set with external rating information
rating_i
## names of cms attributes; if several are recognized then they are listen in decreasing order of number of matching criterias ## names of cms attributes; if several are recognized then they are listen in decreasing order of number of matching criterias
#ext_cms_txt #ext_cms_txt

@ -35,7 +35,7 @@
that you fully re-index after changing this setting as it can that you fully re-index after changing this setting as it can
affect both how text is indexed and queried. affect both how text is indexed and queried.
--> -->
<luceneMatchVersion>4.4</luceneMatchVersion> <luceneMatchVersion>4.5</luceneMatchVersion>
<!-- <lib/> directives can be used to instruct Solr to load an Jars <!-- <lib/> directives can be used to instruct Solr to load an Jars
identified and use them to resolve any "plugins" specified in identified and use them to resolve any "plugins" specified in

@ -962,20 +962,20 @@ search.ranking.rwi.profile =
# The field boostfunctionmode can be either 'add' or 'multiply' to describe the mode. # The field boostfunctionmode can be either 'add' or 'multiply' to describe the mode.
# All boost methods > 0 must have names to be able to select this name with a query, with the syntax /name # All boost methods > 0 must have names to be able to select this name with a query, with the syntax /name
search.ranking.solr.collection.boostname.tmpa.0=Default Profile search.ranking.solr.collection.boostname.tmpa.0=Default Profile
search.ranking.solr.collection.boostfields.tmpa.0=url_paths_sxt^1000.0,synonyms_sxt^1.0,title^10000.0,text_t^2.0,h1_txt^1000.0,h2_txt^100.0,host_organization_s^100000.0 search.ranking.solr.collection.boostfields.tmpa.0=url_paths_sxt^3.0,synonyms_sxt^0.5,title^5.0,text_t^1.0,host_s^6.0,h1_txt^5.0,url_file_name_tokens_t^4.0,h2_txt^3.0
search.ranking.solr.collection.boostquery.tmpa.0=fuzzy_signature_unique_b:true^100000.0 search.ranking.solr.collection.boostquery.tmpa.0=clickdepth_i:0^0.8 clickdepth_i:1^0.4
search.ranking.solr.collection.boostfunction.tmpb.0=scale(cr_host_norm_i,1,20) search.ranking.solr.collection.boostfunction.tmpb.0=
search.ranking.solr.collection.boostname.tmpa.1=Date Profile: sort by date in descending order for a '/data' usage search.ranking.solr.collection.boostname.tmpa.1=Date Profile: sort by date in descending order for a '/data' usage
search.ranking.solr.collection.boostfields.tmpa.1=text_t^1.0 search.ranking.solr.collection.boostfields.tmpa.1=text_t^1.0
search.ranking.solr.collection.boostquery.tmpa.1=fuzzy_signature_unique_b:true^100000.0 search.ranking.solr.collection.boostquery.tmpa.1=clickdepth_i:0^0.8 clickdepth_i:1^0.4
search.ranking.solr.collection.boostfunction.tmpb.1=recip(rord(last_modified),1,1000,1000) search.ranking.solr.collection.boostfunction.tmpb.1=recip(rord(last_modified),1,1000,1000)
search.ranking.solr.collection.boostname.tmpa.2=Intranet Profile: when a search is done on a singe domain only, i.e. if a site:-operator is used search.ranking.solr.collection.boostname.tmpa.2=Intranet Profile: when a search is done on a singe domain only, i.e. if a site:-operator is used
search.ranking.solr.collection.boostfields.tmpa.2=url_paths_sxt^1000.0,synonyms_sxt^1.0,title^10000.0,text_t^2.0,h1_txt^1000.0,h2_txt^100.0 search.ranking.solr.collection.boostfields.tmpa.2=url_paths_sxt^3.0,synonyms_sxt^0.5,title^5.0,text_t^1.0,h1_txt^5.0,url_file_name_tokens_t^4.0,h2_txt^3.0,h3_txt^2.0
search.ranking.solr.collection.boostquery.tmpa.2=fuzzy_signature_unique_b:true^100000.0 search.ranking.solr.collection.boostquery.tmpa.2=fuzzy_signature_unique_b:true^10.0
search.ranking.solr.collection.boostfunction.tmpb.2=scale(cr_host_norm_i,1,20) search.ranking.solr.collection.boostfunction.tmpb.2=
search.ranking.solr.collection.boostname.tmpa.3=_unused3 search.ranking.solr.collection.boostname.tmpa.3=_unused3
search.ranking.solr.collection.boostfields.tmpa.3=text_t^1.0 search.ranking.solr.collection.boostfields.tmpa.3=text_t^1.0
search.ranking.solr.collection.boostquery.tmpa.3=fuzzy_signature_unique_b:true^100000.0 search.ranking.solr.collection.boostquery.tmpa.3=clickdepth_i:0^0.8 clickdepth_i:1^0.4
search.ranking.solr.collection.boostfunction.tmpb.3= search.ranking.solr.collection.boostfunction.tmpb.3=
# the following values are used to identify duplicate content # the following values are used to identify duplicate content

@ -6,9 +6,185 @@
<script type="text/javascript" src="/js/ajax.js"></script> <script type="text/javascript" src="/js/ajax.js"></script>
<script type="text/javascript" src="/js/IndexCreate.js"></script> <script type="text/javascript" src="/js/IndexCreate.js"></script>
<script type="text/javascript"> <script type="text/javascript">
function check(key){ //<![CDATA[
document.getElementById(key).checked = 'checked'; /**
* Set the state of all elements based on other elements state.
* @param {String} cId id of the element that had changed it's state
*/
function setStates(cId) {
// order matters!
// crawl start points
if ($('#url').isChecked()) {
$('#crawlingURL').enable();
$('#sitemapURL, #crawlingFile').disable();
if (cId === "url") { $('#crawlingURL').focus(); }
} else if ($('#sitemap').isChecked()) {
$('#sitemapURL').enable();
$('#crawlingURL, #crawlingFile').disable();
if (cId === "sitemap") { $('#sitemapURL').focus(); }
} else if ($('#file').isChecked()) {
$('#crawlingFile').enable();
$('#crawlingURL, #sitemapURL').disable();
if (cId === "file") { $('#crawlingFile').focus(); }
}
// Load Filters
if (cId === "rangeDomain" || cId === "rangeSubpath" ||
cId === "rangeWide" || typeof cId === 'undefined') {
if ($('#rangeDomain').isChecked() ||
$('#rangeSubpath').isChecked()) {
// restrict to sub-path / domain
$('#mustmatch').disable();
// skip these on initial load
if (typeof cId !== 'undefined') {
$('#deleteoldoff, #deleteoldage').uncheck();
$('#deleteoldon').check();
}
} else if ($('#rangeWide').isChecked()) {
// use Filter
$('#mustmatch').enable();
// skip these on initial load
if (typeof cId !== 'undefined') {
$('#deleteoldon, #deleteoldage').uncheck();
$('#deleteoldoff').check();
if (cId === "rangeWide") { $('#mustmatch').focus(); }
}
}
}
// crawl start: From File
if ($("#sitelist").isChecked()) {
$('#rangeDomain').check();
}
// Delete only old
if ($('#deleteoldage').isChecked()) {
$('#deleteIfOlderNumber, #deleteIfOlderUnit').enable();
} else {
$('#deleteIfOlderNumber, #deleteIfOlderUnit').disable();
}
// Reload if old
if ($('#reloadoldage').isChecked()) {
$('#reloadIfOlderNumber, #reloadIfOlderUnit').enable();
} else {
$('#reloadIfOlderNumber, #reloadIfOlderUnit').disable();
}
// Use Must-Match List for Country Codes?
if ($('#noCountryMustMatchSwitch').isChecked()) {
$('#countryMustMatchList').disable();
} else {
$('#countryMustMatchList').enable();
if (cId === "countryMustMatchSwitch") {
$('#countryMustMatchList').focus();
}
}
// Maximum pages per domain
if ($('#crawlingDomMaxCheck').isChecked()) {
$('#crawlingDomMaxPages').enable();
if (cId === "crawlingDomMaxCheck") {
$('#crawlingDomMaxPages').focus();
}
} else {
$('#crawlingDomMaxPages').disable();
}
// Remote crawl
if ($('#crawlOrder').isChecked()) {
$('#intention').enable();
if (cId === "crawlOrder") { $('#intention').focus(); }
} else {
$('#intention').disable();
}
}
/**
* Disable element if value matches val.
* @param {String} id element id
* @param {String} val value to comapre to elements value */
function disableIf(id, val) {
var e = $('#'+id);
if (e.val() === val) {
e.disable();
}
} }
$(document).ready(function() {
(function($) {
/** Disable a form element. */
$.fn.disable = function() {
return this.each(function() {
$(this).prop('disabled', true);
});
};
/** Enable a form element. */
$.fn.enable = function() {
return this.each(function() {
$(this).prop('disabled', false);
});
};
/** Check DOM & properties if element is checkeds. */
$.fn.isChecked = function() {
return $(this).attr("checked") || $(this).prop("checked");
};
/** Set checked state for checkoxes/radio buttons. */
$.fn.check = function() {
return this.each(function() {
$(this).attr("checked", "checked").prop("checked", true);
});
};
/** Unset checked state for checkoxes/radio buttons. */
$.fn.uncheck = function() {
return this.each(function() {
$(this).removeAttr("checked").prop("checked", false);
});
};
})(jQuery);
/**
* On form submission remove text fields with default values as they
* are set to those by yacy values by yacy, if missing.
* @param {eventObject} ev */
$('#Crawler').on('submit', function(ev){
var defaultMatchAll = "#[matchAllStr]#";
var defaultMatchNone = "#[matchNoneStr]#";
// remove empty textfields
disableIf('crawlingDepthExtension', '');
disableIf('intention', '');
// remove if MATCH_NEVER_STRING
disableIf('mustnotmatch', defaultMatchNone);
disableIf('ipMustnotmatch', defaultMatchNone);
disableIf('indexmustnotmatch', defaultMatchNone);
disableIf('indexcontentmustnotmatch', defaultMatchNone);
// remove if MATCH_ALL_STRING
disableIf('mustmatch', defaultMatchAll);
disableIf('ipMustmatch', defaultMatchAll);
disableIf('indexmustmatch', defaultMatchAll);
disableIf('indexcontentmustmatch', defaultMatchAll);
// remove default collection name
disableIf('collection', '#[defaultCollection]#');
});
// add event handlers to all checkoxes & radio buttons
$(document).on('change', 'input:checkbox,input:radio', function() {
setStates($(this).attr("id"));
});
// set initial states
if ($('#crawlingURL').val() !== '') { changed(); }
setStates();
});
//]]>
</script> </script>
<style type="text/css"> <style type="text/css">
.nobr { .nobr {
@ -50,8 +226,8 @@
Each of these URLs are the root for a crawl start, existing start URLs are always re-loaded. Each of these URLs are the root for a crawl start, existing start URLs are always re-loaded.
Other already visited URLs are sorted out as "double", if they are not allowed using the re-crawl option. Other already visited URLs are sorted out as "double", if they are not allowed using the re-crawl option.
</span></span> </span></span>
<input type="radio" align="top" name="crawlingMode" id="url" value="url" checked="checked" /> <input type="radio" align="top" name="crawlingMode" id="url" value="url" #(crawlingMode_url)#::checked="checked"#(/crawlingMode_url)# />
<textarea name="crawlingURL" id="crawlingURL" cols="64" rows="3" size="41" onkeypress="changed()" onfocus="check('url')" >#[starturl]#</textarea> <textarea name="crawlingURL" id="crawlingURL" cols="64" rows="3" size="41" onkeypress="changed()">#[starturl]#</textarea>
&nbsp; &nbsp;
<span id="robotsOK"></span> <span id="robotsOK"></span>
<span id="title"><br/></span> <span id="title"><br/></span>
@ -59,20 +235,20 @@
</dd> </dd>
<dt></dt> <dt></dt>
<dd> <dd>
<input name="bookmarkTitle" id="bookmarkTitle" type="text" size="46" maxlength="256" value="" readonly="readonly" style="background:transparent; border:0px"/> <input name="bookmarkTitle" id="bookmarkTitle" type="text" size="46" maxlength="256" value="#[bookmarkTitle]#" readonly="readonly" style="background:transparent; border:0px"/>
</dd> </dd>
<dt>From Link-List of URL</dt> <dt>From Link-List of URL</dt>
<dd> <dd>
<input type="radio" name="crawlingMode" id="sitelist" value="sitelist" disabled="disabled" onclick="document.getElementById('Crawler').rangeDomain.checked = true;"/><br /> <input type="radio" name="crawlingMode" id="sitelist" value="sitelist" #(has_url)#disabled="disabled"::#(/has_url)# #(crawlingMode_sitelist)#::checked="checked"#(/crawlingMode_sitelist)#/><br />
<div id="sitelistURLs"></div> <div id="sitelistURLs"></div>
</dd> </dd>
<dt>From Sitemap</dt> <dt>From Sitemap</dt>
<dd> <dd>
<input type="radio" name="crawlingMode" id="sitemap" value="sitemap" disabled="disabled"/><input name="sitemapURL" type="text" size="71" maxlength="256" value="" readonly="readonly"/> <input type="radio" name="crawlingMode" id="sitemap" value="sitemap" #(crawlingMode_sitemap)#::checked="checked"#(/crawlingMode_sitemap)# #(has_sitemapURL)#disabled="disabled"::#(/has_sitemapURL)#/><input name="sitemapURL" id="sitemapURL" type="text" size="71" maxlength="256" value="#[sitemapURL]#"/>
</dd> </dd>
<dt>From File (enter a path<br/>within your local file system)</dt> <dt>From File (enter a path<br/>within your local file system)</dt>
<dd> <dd>
<input type="radio" name="crawlingMode" id="file" value="file" onclick="document.getElementById('Crawler').rangeDomain.checked = true;"/><input type="text" name="crawlingFile" size="71" maxlength="256" onfocus="check('file')"/><!--<input type="file" name="crawlingFile" size="18" onfocus="check('file')"/>--> <input type="radio" name="crawlingMode" id="file" value="file" #(crawlingMode_file)#::checked="checked"#(/crawlingMode_file)#/><input type="text" name="crawlingFile" id="crawlingFile" value="#[crawlingFile]#" size="71" maxlength="256"/>
</dd> </dd>
</dl> </dl>
</fieldset> </fieldset>
@ -129,10 +305,10 @@
</span></span> </span></span>
<table border="0"> <table border="0">
<tr><td width="110"><img src="/env/grafics/plus.gif"> must-match</td><td></td></tr> <tr><td width="110"><img src="/env/grafics/plus.gif"> must-match</td><td></td></tr>
<tr><td colspan="2"><input type="radio" name="range" id="rangeDomain" value="domain" onclick="document.getElementById('mustmatch').disabled=true;document.getElementById('deleteoldon').disabled=false;document.getElementById('deleteoldage').disabled=false;document.getElementById('deleteoldon').checked=true;"/>Restrict to start domain(s)</td></tr> <tr><td colspan="2"><input type="radio" name="range" id="rangeDomain" value="domain" #(range_domain)#::checked="checked"#(/range_domain)#/>Restrict to start domain(s)</td></tr>
<tr><td colspan="2"><input type="radio" name="range" id="rangeSubpath" value="subpath" onclick="document.getElementById('mustmatch').disabled=true;document.getElementById('deleteoldon').disabled=false;document.getElementById('deleteoldage').disabled=false;document.getElementById('deleteoldon').checked=true;" />Restrict to sub-path(s)</td></tr> <tr><td colspan="2"><input type="radio" name="range" id="rangeSubpath" value="subpath" #(range_subpath)#::checked="checked"#(/range_subpath)#/>Restrict to sub-path(s)</td></tr>
<tr><td><input type="radio" name="range" id="rangeWide" value="wide" checked="checked" onclick="document.getElementById('mustmatch').disabled=false;document.getElementById('deleteoldoff').checked=true;document.getElementById('deleteoldon').disabled=true;document.getElementById('deleteoldage').disabled=true;"/>Use filter</td> <tr><td><input type="radio" name="range" id="rangeWide" value="wide" #(range_wide)#::checked="checked"#(/range_wide)#/>Use filter</td>
<td valign="bottom"><input name="mustmatch" id="mustmatch" type="text" size="55" maxlength="100000" value="#[mustmatch]#" onclick="document.getElementById('deleteoldon').disabled=false;document.getElementById('deleteoldage').disabled=false"/></td></tr> <td valign="bottom"><input name="mustmatch" id="mustmatch" type="text" size="55" maxlength="100000" value="#[mustmatch]#"/></td></tr>
<tr><td><img src="/env/grafics/minus.gif"> must-not-match</td><td><input name="mustnotmatch" id="mustnotmatch" type="text" size="55" maxlength="100000" value="#[mustnotmatch]#" /></td></tr> <tr><td><img src="/env/grafics/minus.gif"> must-not-match</td><td><input name="mustnotmatch" id="mustnotmatch" type="text" size="55" maxlength="100000" value="#[mustnotmatch]#" /></td></tr>
</table> </table>
</dd> </dd>
@ -149,8 +325,8 @@
Crawls can be restricted to specific countries. This uses the country code that can be computed from Crawls can be restricted to specific countries. This uses the country code that can be computed from
the IP of the server that hosts the page. The filter is not a regular expressions but a list of country codes, separated by comma. the IP of the server that hosts the page. The filter is not a regular expressions but a list of country codes, separated by comma.
</span></span> </span></span>
<input type="radio" name="countryMustMatchSwitch" id="countryMustMatchSwitch" value="false" checked="checked" />no country code restriction<br /> <input type="radio" name="countryMustMatchSwitch" id="noCountryMustMatchSwitch" value="0" #(countryMustMatchSwitchChecked)#checked="checked"::#(/countryMustMatchSwitchChecked)# />no country code restriction<br />
<input type="radio" name="countryMustMatchSwitch" id="countryMustMatchSwitch" value="true" />Use filter&nbsp;&nbsp; <input type="radio" name="countryMustMatchSwitch" id="countryMustMatchSwitch" value="1" #(countryMustMatchSwitchChecked)#::checked="checked"#(/countryMustMatchSwitchChecked)# />Use filter&nbsp;&nbsp;
<input name="countryMustMatchList" id="countryMustMatchList" type="text" size="60" maxlength="256" value="#[countryMustMatch]#" /> <input name="countryMustMatchList" id="countryMustMatchList" type="text" size="60" maxlength="256" value="#[countryMustMatch]#" />
</dd> </dd>
</dl> </dl>
@ -187,25 +363,21 @@
After a crawl was done in the past, document may become stale and eventually they are also deleted on the target host. After a crawl was done in the past, document may become stale and eventually they are also deleted on the target host.
To remove old files from the search index it is not sufficient to just consider them for re-load but it may be necessary To remove old files from the search index it is not sufficient to just consider them for re-load but it may be necessary
to delete them because they simply do not exist any more. Use this in combination with re-crawl while this time should be longer. to delete them because they simply do not exist any more. Use this in combination with re-crawl while this time should be longer.
</span></span><input type="radio" name="deleteold" id="deleteoldoff" value="off" checked="checked"/>Do not delete any document before the crawl is started.</dd> </span></span><input type="radio" name="deleteold" id="deleteoldoff" value="off" #(deleteold_off)#::checked="checked"#(/deleteold_off)#/>Do not delete any document before the crawl is started.</dd>
<dt>Delete sub-path</dt> <dt>Delete sub-path</dt>
<dd><input type="radio" name="deleteold" id="deleteoldon" value="on" disabled="true"/>For each host in the start url list, delete all documents (in the given subpath) from that host.</dd> <dd><input type="radio" name="deleteold" id="deleteoldon" value="on" #(deleteold_on)#::checked="checked"#(/deleteold_on)#/>For each host in the start url list, delete all documents (in the given subpath) from that host.</dd>
<dt>Delete only old</dt> <dt>Delete only old</dt>
<dd><input type="radio" name="deleteold" id="deleteoldage" value="age" disabled="true"/>Treat documents that are loaded <dd><input type="radio" name="deleteold" id="deleteoldage" value="age" #(deleteold_age)#::checked="checked"#(/deleteold_age)#/>Treat documents that are loaded
<select name="deleteIfOlderNumber" id="deleteIfOlderNumber"> <select name="deleteIfOlderNumber" id="deleteIfOlderNumber">
<option value="1">1</option><option value="2">2</option><option value="3">3</option> #(deleteIfOlderSelect)#::
<option value="4">4</option><option value="5">5</option><option value="6">6</option> #{list}#<option value="#[name]#" #(default)#::selected="selected"#(/default)#>#[name]#</option>#{/list}#
<option value="7">7</option> #(/deleteIfOlderSelect)#
<option value="8">8</option><option value="9">9</option><option value="10">10</option> </select>
<option value="12">12</option><option value="14" selected="selected">14</option><option value="21">21</option> <select name="deleteIfOlderUnit" id="deleteIfOlderUnit">
<option value="28">28</option><option value="30">30</option> #(deleteIfOlderUnitSelect)#::
</select> #{list}#<option value="#[value]#" #(default)#::selected="selected"#(/default)#>#[name]#</option>#{/list}#
<select name="deleteIfOlderUnit" id="deleteIfOlderUnit"> #(/deleteIfOlderUnitSelect)#
<option value="year">years</option> </select> ago as stale and delete them before the crawl is started.
<option value="month">months</option>
<option value="day" selected="selected">days</option>
<option value="hour">hours</option>
</select> ago as stale and delete them before the crawl is started.
</dd> </dd>
</dl> </dl>
</fieldset> </fieldset>
@ -217,23 +389,19 @@
A web crawl performs a double-check on all links found in the internet against the internal database. If the same url is found again, A web crawl performs a double-check on all links found in the internet against the internal database. If the same url is found again,
then the url is treated as double when you check the 'no doubles' option. A url may be loaded again when it has reached a specific age, then the url is treated as double when you check the 'no doubles' option. A url may be loaded again when it has reached a specific age,
to use that check the 're-load' option. to use that check the 're-load' option.
</span></span><input type="radio" name="recrawl" value="nodoubles" checked="checked"/>Never load any page that is already known. Only the start-url may be loaded again.</dd> </span></span><input type="radio" name="recrawl" id="reloadoldoff" value="nodoubles" #(recrawl_nodoubles)#::checked="checked"#(/recrawl_nodoubles)#/>Never load any page that is already known. Only the start-url may be loaded again.</dd>
<dt>Re-load</dt> <dt>Re-load</dt>
<dd><input type="radio" name="recrawl" value="reload"/>Treat documents that are loaded <dd><input type="radio" name="recrawl" id="reloadoldage" value="reload" #(recrawl_reload)#::checked="checked"#(/recrawl_reload)#/>Treat documents that are loaded
<select name="reloadIfOlderNumber" id="reloadIfOlderNumber"> <select name="reloadIfOlderNumber" id="reloadIfOlderNumber">
<option value="1">1</option><option value="2">2</option><option value="3">3</option> #(reloadIfOlderSelect)#::
<option value="4">4</option><option value="5">5</option><option value="6">6</option> #{list}#<option value="#[name]#" #(default)#::selected="selected"#(/default)#>#[name]#</option>#{/list}#
<option value="7" selected="selected">7</option> #(/reloadIfOlderSelect)#
<option value="8">8</option><option value="9">9</option><option value="10">10</option> </select>
<option value="12">12</option><option value="14">14</option><option value="21">21</option> <select name="reloadIfOlderUnit" id="reloadIfOlderUnit">
<option value="28">28</option><option value="30">30</option> #(reloadIfOlderUnitSelect)#::
</select> #{list}#<option value="#[value]#" #(default)#::selected="selected"#(/default)#>#[name]#</option>#{/list}#
<select name="reloadIfOlderUnit" id="reloadIfOlderUnit"> #(/reloadIfOlderUnitSelect)#
<option value="year">years</option> </select> ago as stale and load them again. If they are younger, they are ignored.
<option value="month">months</option>
<option value="day" selected="selected">days</option>
<option value="hour">hours</option>
</select> ago as stale and load them again. If they are younger, they are ignored.
</dd> </dd>
</dl> </dl>
</fieldset> </fieldset>
@ -256,10 +424,10 @@
<b>if&nbsp;exist</b>: use the cache if the cache exist. Do no check freshness. Otherwise use online source; <b>if&nbsp;exist</b>: use the cache if the cache exist. Do no check freshness. Otherwise use online source;
<b>cache&nbsp;only</b>: never go online, use all content from cache. If no cache exist, treat content as unavailable <b>cache&nbsp;only</b>: never go online, use all content from cache. If no cache exist, treat content as unavailable
</span></span> </span></span>
<input type="radio" name="cachePolicy" value="nocache" />no&nbsp;cache&nbsp;&nbsp;&nbsp; <input type="radio" name="cachePolicy" value="nocache" #(cachePolicy_nocache)#::checked="checked"#(/cachePolicy_nocache)#/>no&nbsp;cache&nbsp;&nbsp;&nbsp;
<input type="radio" name="cachePolicy" value="iffresh" checked="checked" />if&nbsp;fresh&nbsp;&nbsp;&nbsp; <input type="radio" name="cachePolicy" value="iffresh" #(cachePolicy_iffresh)#::checked="checked"#(/cachePolicy_iffresh)# />if&nbsp;fresh&nbsp;&nbsp;&nbsp;
<input type="radio" name="cachePolicy" value="ifexist" />if&nbsp;exist&nbsp;&nbsp;&nbsp; <input type="radio" name="cachePolicy" value="ifexist" #(cachePolicy_ifexist)#::checked="checked"#(/cachePolicy_ifexist)#/>if&nbsp;exist&nbsp;&nbsp;&nbsp;
<input type="radio" name="cachePolicy" value="cacheonly" />cache&nbsp;only <input type="radio" name="cachePolicy" value="cacheonly" #(cachePolicy_cacheonly)#::checked="checked"#(/cachePolicy_cacheonly)#/>cache&nbsp;only
</dd> </dd>
</dl> </dl>
</fieldset> </fieldset>
@ -290,7 +458,7 @@
<dt>Do Local Indexing</dt> <dt>Do Local Indexing</dt>
<dd> <dd>
<span class="info" style="float:right"><img src="/env/grafics/i16.gif" width="16" height="16" alt="info"/><span style="right:0px;"> <span class="info" style="float:right"><img src="/env/grafics/i16.gif" width="16" height="16" alt="info"/><span style="right:0px;">
This enables indexing of the wepages the crawler will download. This should be switched on by default, unless you want to crawl only to fill the This enables indexing of the webpages the crawler will download. This should be switched on by default, unless you want to crawl only to fill the
Document Cache without indexing. Document Cache without indexing.
</span></span> </span></span>
<label for="indexText">index text</label>: <label for="indexText">index text</label>:
@ -315,7 +483,7 @@
</td> </td>
<td> <td>
<label for="intention">Describe your intention to start this global crawl (optional)</label>:<br /> <label for="intention">Describe your intention to start this global crawl (optional)</label>:<br />
<input name="intention" id="intention" type="text" size="40" maxlength="100" value="" /><br /> <input name="intention" id="intention" type="text" size="40" maxlength="100" value="#[intention]#" /><br />
This message will appear in the 'Other Peer Crawl Start' table of other peers. This message will appear in the 'Other Peer Crawl Start' table of other peers.
</td> </td>
</tr> </tr>
@ -335,7 +503,7 @@
</dl> </dl>
</fieldset> </fieldset>
<dt><input type="submit" name="crawlingstart" value="Start New Crawl Job" class="submitready"/></dt><dd></dd> <dt><input type="hidden" name="crawlingstart" value="1"/><input type="submit" value="Start New Crawl Job" class="submitready"/></dt><dd></dd>
</dl> </dl>
</fieldset> </fieldset>
</form> </form>

@ -42,36 +42,516 @@ public class CrawlStartExpert_p {
// return variable that accumulates replacements // return variable that accumulates replacements
final Switchboard sb = (Switchboard) env; final Switchboard sb = (Switchboard) env;
final serverObjects prop = new serverObjects(); final serverObjects prop = new serverObjects();
final String defaultCollection = "user";
// define visible variables // javascript values
prop.put("starturl", /*(intranet) ? repository :*/ ""); prop.put("matchAllStr", CrawlProfile.MATCH_ALL_STRING);
prop.put("proxyPrefetchDepth", env.getConfig("proxyPrefetchDepth", "0")); prop.put("matchNoneStr", CrawlProfile.MATCH_NEVER_STRING);
prop.put("crawlingDepth", Math.min(3, env.getConfigLong("crawlingDepth", 0))); prop.put("defaultCollection", defaultCollection);
prop.put("crawlingDepthExtension", CrawlProfile.MATCH_NEVER_STRING);
prop.put("directDocByURLChecked", sb.getConfigBool("crawlingDirectDocByURL", true) ? "1" : "0"); // ---------- Start point
prop.put("mustmatch", /*(intranet) ? repository + ".*" :*/ CrawlProfile.MATCH_ALL_STRING); // crawl start URL
prop.put("mustnotmatch", CrawlProfile.MATCH_NEVER_STRING); if (post != null && post.containsKey("crawlingURL")) {
prop.put("indexmustmatch", CrawlProfile.MATCH_ALL_STRING); final String crawlingURL = post.get("crawlingURL", "");
prop.put("indexmustnotmatch", CrawlProfile.MATCH_NEVER_STRING); prop.put("starturl", crawlingURL);
prop.put("indexcontentmustmatch", CrawlProfile.MATCH_ALL_STRING); // simple check for content since it may be empty
prop.put("indexcontentmustnotmatch", CrawlProfile.MATCH_NEVER_STRING); if (!crawlingURL.trim().isEmpty()) {
prop.put("ipMustmatch", sb.getConfig("crawlingIPMustMatch", CrawlProfile.MATCH_ALL_STRING)); prop.put("has_url", 1);
prop.put("ipMustnotmatch", sb.getConfig("crawlingIPMustNotMatch", CrawlProfile.MATCH_NEVER_STRING)); }
prop.put("countryMustMatch", sb.getConfig("crawlingCountryMustMatch", "")); } else {
prop.put("starturl", "");
}
// sitemap URL
if (post != null && post.containsKey("sitemapURL")) {
final String sitemapURL = post.get("sitemapURL", "");
prop.put("sitemapURL", sitemapURL);
// simple check for content since it may be empty
if (!sitemapURL.trim().isEmpty()) {
prop.put("has_sitemapURL", 1);
}
} else {
prop.put("sitemapURL", "");
}
// crawling file
if (post != null && post.containsKey("crawlingFile")) {
final String crawlingFile = post.get("crawlingFile", "");
prop.put("crawlingFile", crawlingFile);
// simple check for content since it may be empty
if (!crawlingFile.trim().isEmpty()) {
prop.put("has_crawlingFile", 1);
}
} else {
prop.put("crawlingFile", "");
}
// Crawling mode
if (post != null && post.containsKey("crawlingMode")) {
final String crawlingMode = post.get("crawlingMode", "");
boolean hasMode = false;
if (crawlingMode.equalsIgnoreCase("sitelist")
&& prop.getBoolean("has_url")) {
// sitelist needs "crawlingURL" parameter, checked already
prop.put("crawlingMode_sitelist", 1);
hasMode = true;
} else if (crawlingMode.equalsIgnoreCase("sitemap")
&& prop.getBoolean("has_sitemapURL")) {
// sitemap needs "sitemapURL" parameter, checked already
prop.put("crawlingMode_sitemap", 1);
hasMode = true;
} else if (crawlingMode.equalsIgnoreCase("file")
&& prop.getBoolean("has_crawlingFile")) {
// sitemap needs "crawlingFile" parameter, checked already
prop.put("crawlingMode_file", 1);
hasMode = true;
} else if (crawlingMode.equalsIgnoreCase("url")
&& prop.getBoolean("has_crawlingURL")) {
prop.put("crawlingMode_url", 1);
hasMode = true;
}
// try to guess mode
if (!hasMode) {
if (prop.getBoolean("has_url")) {
prop.put("crawlingMode_url", 1);
} else if (prop.getBoolean("has_sitemapURL")) {
prop.put("crawlingMode_sitemap", 1);
} else if (prop.getBoolean("has_crawlingFile")) {
prop.put("crawlingMode_file", 1);
} else {
prop.put("crawlingMode_url", 1);
}
}
} else {
// default to URL
prop.put("crawlingMode_url", 1);
}
// Bookmark title (set by script)
if (post != null && post.containsKey("bookmarkTitle")) {
prop.put("bookmarkTitle", post.get("bookmarkTitle", ""));
} else {
prop.put("bookmarkTitle", "");
}
// ---------- Crawling filter
final int crawlingDomMaxPages = env.getConfigInt(
"crawlingDomMaxPages", -1);
// crawling depth
if (post != null && post.containsKey("crawlingDepth")) {
final Integer depth = post.getInt("crawlingDepth", -1);
// depth is limited to two digits, zero allowed
if (depth >= 0 && depth < 100) {
prop.put("crawlingDepth", depth);
}
}
if (!prop.containsKey("crawlingDepth")) {
prop.put("crawlingDepth", Math.min(3,
env.getConfigLong("crawlingDepth", 0)));
}
// linked non-parseable documents?
if (post == null) {
prop.put("directDocByURLChecked",
sb.getConfigBool("crawlingDirectDocByURL", true) ? 1 : 0);
} else {
prop.put("directDocByURLChecked",
post.getBoolean("directDocByURL") ? 1 : 0);
}
// Unlimited crawl depth for URLs matching with
if (post != null && post.containsKey("crawlingDepthExtension")) {
prop.put("crawlingDepthExtension",
post.get("crawlingDepthExtension", ""));
} else {
prop.put("crawlingDepthExtension", CrawlProfile.MATCH_NEVER_STRING);
}
// Limit by maximum Pages per Domain?
if (post == null) {
prop.put("crawlingDomMaxCheck",
(crawlingDomMaxPages == -1) ? 0 : 1);
} else {
prop.put("crawlingDomMaxCheck",
post.getBoolean("crawlingDomMaxCheck") ? 1 : 0);
}
// Maximum Pages per Domain
if (post != null && post.containsKey("crawlingDomMaxPages")) {
final Integer maxPages = post.getInt("crawlingDomMaxPages", -1);
// depth is limited to six digits, zero not allowed
if (maxPages > 0 && maxPages < 1000000) {
prop.put("crawlingDomMaxPages", maxPages);
}
}
if (!prop.containsKey("crawlingDomMaxPages")) {
prop.put("crawlingDomMaxPages",
(crawlingDomMaxPages == -1) ? 10000 : crawlingDomMaxPages);
}
// Accept URLs with query-part?
// Obey html-robots-noindex?
if (post == null) {
prop.put("crawlingQChecked",
env.getConfigBool("crawlingQ", true) ? 1 : 0);
prop.put("obeyHtmlRobotsNoindexChecked",
env.getConfigBool("obeyHtmlRobotsNoindex", true) ? 1 : 0);
} else {
prop.put("crawlingQChecked", post.getBoolean("crawlingQ") ? 1 : 0);
prop.put("obeyHtmlRobotsNoindexChecked",
post.getBoolean("obeyHtmlRobotsNoindex") ? 1 : 0);
}
// Load Filter on URLs (range)
if (post != null && post.containsKey("range")) {
final String range = post.get("range", "");
if (range.equalsIgnoreCase("domain")) {
prop.put("range_domain", 1);
} else if (range.equalsIgnoreCase("subpath")) {
prop.put("range_subpath", 1);
} else if (range.equalsIgnoreCase("wide")) {
prop.put("range_wide", 1);
}
} else {
prop.put("range_wide", 1);
}
// Load Filter on URLs: must match
if (post != null && post.containsKey("mustmatch")) {
prop.put("mustmatch", post.get("mustmatch", ""));
} else {
prop.put("mustmatch", CrawlProfile.MATCH_ALL_STRING);
}
// Load Filter on URLs: must-not-match
if (post != null && post.containsKey("mustnotmatch")) {
prop.put("mustnotmatch", post.get("mustnotmatch", ""));
} else {
prop.put("mustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
}
// Load Filter on IPs: must match
if (post != null && post.containsKey("ipMustmatch")) {
prop.put("ipMustmatch", post.get("ipMustmatch", ""));
} else {
prop.put("ipMustmatch", sb.getConfig("crawlingIPMustMatch",
CrawlProfile.MATCH_ALL_STRING));
}
// Load Filter on IPs: must-not-match
if (post != null && post.containsKey("ipMustnotmatch")) {
prop.put("ipMustnotmatch", post.get("ipMustnotmatch", ""));
} else {
prop.put("ipMustnotmatch", sb.getConfig("crawlingIPMustNotMatch",
CrawlProfile.MATCH_NEVER_STRING));
}
// Use Country Codes Match-List?
if (post == null) {
// use the default that was set in the original template
prop.put("countryMustMatchSwitchChecked", 0);
} else {
prop.put("countryMustMatchSwitchChecked",
post.getBoolean("countryMustMatchSwitch") ? 1 : 0);
}
// Must-Match List for Country Codes
if (post != null && post.containsKey("countryMustMatchList")) {
prop.put("countryMustMatch", post.get("countryMustMatchList", ""));
} else {
prop.put("countryMustMatch",
sb.getConfig("crawlingCountryMustMatch", ""));
}
// ---------- Document filter
// Indexer filter on URLs: must match
if (post != null && post.containsKey("indexmustmatch")) {
prop.put("indexmustmatch", post.get("indexmustmatch", ""));
} else {
prop.put("indexmustmatch", CrawlProfile.MATCH_ALL_STRING);
}
// Indexer filter on URLs: must-no-match
if (post != null && post.containsKey("indexmustnotmatch")) {
prop.put("indexmustnotmatch", post.get("indexmustnotmatch", ""));
} else {
prop.put("indexmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
}
// Filter on Content of Document: must match
if (post != null && post.containsKey("indexcontentmustmatch")) {
prop.put("indexcontentmustmatch",
post.get("indexcontentmustmatch", ""));
} else {
prop.put("indexcontentmustmatch", CrawlProfile.MATCH_ALL_STRING);
}
// Filter on Content of Document: must-not-match
if (post != null && post.containsKey("indexcontentmustnotmatch")) {
prop.put("indexcontentmustnotmatch",
post.get("indexcontentmustnotmatch", ""));
} else {
prop.put("indexcontentmustnotmatch", CrawlProfile.MATCH_NEVER_STRING);
}
// ---------- Clean-Up before Crawl Start
// delete if older settings: number value
prop.put("deleteIfOlderSelect", 1);
for (int i=0; i<13; i++) {
prop.put("deleteIfOlderSelect_list_"+i+"_name", Integer.toString(i));
}
prop.put("deleteIfOlderSelect_list_13_name", "14");
prop.put("deleteIfOlderSelect_list_14_name", "21");
prop.put("deleteIfOlderSelect_list_15_name", "28");
prop.put("deleteIfOlderSelect_list_16_name", "30");
prop.put("deleteIfOlderSelect_list", 17);
if (post != null && post.containsKey("deleteIfOlderNumber")) {
final Integer olderNumber = post.getInt("deleteIfOlderNumber", -1);
if (olderNumber >0 && olderNumber <= 12) {
prop.put("deleteIfOlderSelect_list_" + olderNumber +
"_default", 1);
} else {
switch (olderNumber) {
case 21:
prop.put("deleteIfOlderSelect_list_14_default", 1);
break;
case 28:
prop.put("deleteIfOlderSelect_list_15_default", 1);
break;
case 30:
prop.put("deleteIfOlderSelect_list_16_default", 1);
break;
default:
prop.put("deleteIfOlderSelect_list_13_default", 1);
break;
}
}
} else {
prop.put("deleteIfOlderSelect_list_13_default", 1);
}
// delete if older settings: number unit
prop.put("deleteIfOlderUnitSelect", 1);
prop.put("deleteIfOlderUnitSelect_list_0_name", "years");
prop.put("deleteIfOlderUnitSelect_list_0_value", "year");
prop.put("deleteIfOlderUnitSelect_list_1_name", "months");
prop.put("deleteIfOlderUnitSelect_list_1_value", "month");
prop.put("deleteIfOlderUnitSelect_list_2_name", "days");
prop.put("deleteIfOlderUnitSelect_list_2_value", "day");
prop.put("deleteIfOlderUnitSelect_list_3_name", "hours");
prop.put("deleteIfOlderUnitSelect_list_3_value", "hour");
prop.put("deleteIfOlderUnitSelect_list", 4);
if (post != null && post.containsKey("deleteIfOlderUnit")) {
final String olderUnit = post.get("deleteIfOlderUnit", "");
if (olderUnit.equalsIgnoreCase("year")) {
prop.put("deleteIfOlderUnitSelect_list_0_default", 1);
} else if (olderUnit.equalsIgnoreCase("month")) {
prop.put("deleteIfOlderUnitSelect_list_1_default", 1);
} else if (olderUnit.equalsIgnoreCase("hour")) {
prop.put("deleteIfOlderUnitSelect_list_3_default", 1);
} else {
prop.put("deleteIfOlderUnitSelect_list_2_default", 1);
}
} else {
prop.put("deleteIfOlderUnitSelect_list_2_default", 1);
}
// delete any document before the crawl is started?
if (post != null && post.containsKey("deleteold")) {
final String deleteold = post.get("deleteold", "");
if (deleteold.equalsIgnoreCase("on")){
prop.put("deleteold_on", 1);
} else if (deleteold.equalsIgnoreCase("age")) {
prop.put("deleteold_age", 1);
} else {
prop.put("deleteold_off", 1);
}
} else {
prop.put("deleteold_off", 1);
}
// ---------- Double-Check Rules
// reload settings: number value
prop.put("reloadIfOlderSelect", 1);
for (int i=0; i<13; i++) {
prop.put("reloadIfOlderSelect_list_"+i+"_name", Integer.toString(i));
}
prop.put("reloadIfOlderSelect_list_13_name", "14");
prop.put("reloadIfOlderSelect_list_14_name", "21");
prop.put("reloadIfOlderSelect_list_15_name", "28");
prop.put("reloadIfOlderSelect_list_16_name", "30");
prop.put("reloadIfOlderSelect_list", 17);
if (post != null && post.containsKey("reloadIfOlderNumber")) {
final Integer olderNumber = post.getInt("reloadIfOlderNumber", -1);
if (olderNumber >0 && olderNumber <= 12) {
prop.put("reloadIfOlderSelect_list_" + olderNumber +
"_default", 1);
} else {
switch (olderNumber) {
case 21:
prop.put("reloadIfOlderSelect_list_14_default", 1);
break;
case 28:
prop.put("reloadIfOlderSelect_list_15_default", 1);
break;
case 30:
prop.put("reloadIfOlderSelect_list_16_default", 1);
break;
default:
prop.put("reloadIfOlderSelect_list_13_default", 1);
break;
}
}
} else {
prop.put("reloadIfOlderSelect_list_13_default", 1);
}
// reload settings: number unit
prop.put("reloadIfOlderUnitSelect", 1);
prop.put("reloadIfOlderUnitSelect_list_0_name", "years");
prop.put("reloadIfOlderUnitSelect_list_0_value", "year");
prop.put("reloadIfOlderUnitSelect_list_1_name", "months");
prop.put("reloadIfOlderUnitSelect_list_1_value", "month");
prop.put("reloadIfOlderUnitSelect_list_2_name", "days");
prop.put("reloadIfOlderUnitSelect_list_2_value", "day");
prop.put("reloadIfOlderUnitSelect_list_3_name", "hours");
prop.put("reloadIfOlderUnitSelect_list_3_value", "hour");
prop.put("reloadIfOlderUnitSelect_list", 4);
if (post != null && post.containsKey("reloadIfOlderUnit")) {
final String olderUnit = post.get("reloadIfOlderUnit", "");
if (olderUnit.equalsIgnoreCase("year")) {
prop.put("reloadIfOlderUnitSelect_list_0_default", 1);
} else if (olderUnit.equalsIgnoreCase("month")) {
prop.put("reloadIfOlderUnitSelect_list_1_default", 1);
} else if (olderUnit.equalsIgnoreCase("hour")) {
prop.put("reloadIfOlderUnitSelect_list_3_default", 1);
} else {
prop.put("reloadIfOlderUnitSelect_list_2_default", 1);
}
} else {
prop.put("reloadIfOlderUnitSelect_list_2_default", 1);
}
if (post != null && post.containsKey("recrawl")) {
final String recrawl = post.get("recrawl", "");
if (recrawl.equalsIgnoreCase("reload")) {
prop.put("recrawl_reload", 1);
} else {
prop.put("recrawl_nodoubles", 1);
}
} else {
prop.put("recrawl_nodoubles", 1);
}
// ---------- Document Cache
// Store to Web Cache?
if (post == null) {
prop.put("storeHTCacheChecked",
env.getConfigBool("storeHTCache", true) ? 1 : 0);
} else {
prop.put("storeHTCacheChecked",
post.getBoolean("storeHTCache") ? 1 : 0);
}
// Policy for usage of Web Cache
if (post != null && post.containsKey("cachePolicy")) {
final String cachePolicy = post.get("cachePolicy", "");
if (cachePolicy.equalsIgnoreCase("nocache")) {
prop.put("cachePolicy_nocache", 1);
} else if (cachePolicy.equalsIgnoreCase("ifexist")) {
prop.put("cachePolicy_ifexist", 1);
} else if (cachePolicy.equalsIgnoreCase("cacheonly")) {
prop.put("cachePolicy_cacheonly", 1);
} else {
prop.put("cachePolicy_iffresh", 1);
}
} else {
prop.put("cachePolicy_iffresh", 1);
}
// ---------- Agent name (untested & untouched)
if (sb.isP2PMode()) {
prop.put("agentSelect", 0);
} else {
prop.put("agentSelect", 1);
List<String> agentNames = new ArrayList<String>();
if (sb.isIntranetMode()) {
agentNames.add(ClientIdentification.yacyIntranetCrawlerAgentName);
}
if (sb.isGlobalMode()) {
agentNames.add(ClientIdentification.yacyInternetCrawlerAgentName);
}
agentNames.add(ClientIdentification.googleAgentName);
if (sb.isAllIPMode()) {
agentNames.add(ClientIdentification.browserAgentName);
}
for (int i = 0; i < agentNames.size(); i++) {
prop.put("agentSelect_list_" + i + "_name", agentNames.get(i));
}
prop.put("agentSelect_list", agentNames.size());
}
prop.put("agentSelect_defaultAgentName",
ClientIdentification.yacyInternetCrawlerAgentName);
// ---------- Index Administration
// Do Local Indexing
if (post == null) {
// Local index text?
prop.put("indexingTextChecked",
env.getConfigBool("indexText", true) ? 1 : 0);
// Local index media?
prop.put("indexingMediaChecked",
env.getConfigBool("indexMedia", true) ? 1 : 0);
// Do Remote Indexing?
prop.put("crawlOrderChecked",
env.getConfigBool("crawlOrder", true) ? 1 : 0);
// Remote crawl intention
prop.put("intention", "");
} else {
prop.put("indexingTextChecked",
post.getBoolean("indexText") ? 1 : 0);
prop.put("indexingMediaChecked",
post.getBoolean("indexMedia") ? 1 : 0);
prop.put("crawlOrderChecked",
post.getBoolean("crawlOrder") ? 1 : 0);
prop.put("intention", post.get("intention", ""));
}
// Target collection
boolean collectionEnabled =
sb.index.fulltext().getDefaultConfiguration().isEmpty() ||
sb.index.fulltext().getDefaultConfiguration().contains(
CollectionSchema.collection_sxt);
prop.put("collectionEnabled", collectionEnabled ? 1 : 0);
if (collectionEnabled) {
if (post != null && post.containsKey("collection")) {
prop.put("collection", post.get("collection", ""));
} else {
prop.put("collection", collectionEnabled ? defaultCollection : "");
}
}
/* problaby unused (no corresponding entry in template)
prop.put("proxyPrefetchDepth", env.getConfig("proxyPrefetchDepth", "0"));
final int crawlingDomFilterDepth = env.getConfigInt("crawlingDomFilterDepth", -1); final int crawlingDomFilterDepth = env.getConfigInt("crawlingDomFilterDepth", -1);
prop.put("crawlingDomFilterCheck", (crawlingDomFilterDepth == -1) ? "0" : "1"); prop.put("crawlingDomFilterCheck", (crawlingDomFilterDepth == -1) ? "0" : "1");
prop.put("crawlingDomFilterDepth", (crawlingDomFilterDepth == -1) ? 1 : crawlingDomFilterDepth); prop.put("crawlingDomFilterDepth", (crawlingDomFilterDepth == -1) ? 1 : crawlingDomFilterDepth);
final int crawlingDomMaxPages = env.getConfigInt("crawlingDomMaxPages", -1);
prop.put("crawlingDomMaxCheck", (crawlingDomMaxPages == -1) ? "0" : "1");
prop.put("crawlingDomMaxPages", (crawlingDomMaxPages == -1) ? 10000 : crawlingDomMaxPages);
prop.put("crawlingQChecked", env.getConfigBool("crawlingQ", true) ? "1" : "0");
prop.put("followFramesChecked", env.getConfigBool("followFrames", true) ? "1" : "0"); prop.put("followFramesChecked", env.getConfigBool("followFrames", true) ? "1" : "0");
prop.put("obeyHtmlRobotsNoindexChecked", env.getConfigBool("obeyHtmlRobotsNoindex", true) ? "1" : "0");
prop.put("storeHTCacheChecked", env.getConfigBool("storeHTCache", true) ? "1" : "0");
prop.put("indexingTextChecked", env.getConfigBool("indexText", true) ? "1" : "0");
prop.put("indexingMediaChecked", env.getConfigBool("indexMedia", true) ? "1" : "0");
prop.put("crawlOrderChecked", env.getConfigBool("crawlOrder", true) ? "1" : "0");
final long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 100L); final long LCbusySleep = env.getConfigLong(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL_BUSYSLEEP, 100L);
final int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep); final int LCppm = (LCbusySleep == 0) ? 1000 : (int) (60000L / LCbusySleep);
@ -83,25 +563,8 @@ public class CrawlStartExpert_p {
prop.put("xsstopwChecked", env.getConfigBool("xsstopw", true) ? "1" : "0"); prop.put("xsstopwChecked", env.getConfigBool("xsstopw", true) ? "1" : "0");
prop.put("xdstopwChecked", env.getConfigBool("xdstopw", true) ? "1" : "0"); prop.put("xdstopwChecked", env.getConfigBool("xdstopw", true) ? "1" : "0");
prop.put("xpstopwChecked", env.getConfigBool("xpstopw", true) ? "1" : "0"); prop.put("xpstopwChecked", env.getConfigBool("xpstopw", true) ? "1" : "0");
*/
boolean collectionEnabled = sb.index.fulltext().getDefaultConfiguration().isEmpty() || sb.index.fulltext().getDefaultConfiguration().contains(CollectionSchema.collection_sxt);
prop.put("collectionEnabled", collectionEnabled ? 1 : 0);
prop.put("collection", collectionEnabled ? "user" : "");
if (sb.isP2PMode()) {
prop.put("agentSelect", 0);
} else {
prop.put("agentSelect", 1);
List<String> agentNames = new ArrayList<String>();
if (sb.isIntranetMode()) agentNames.add(ClientIdentification.yacyIntranetCrawlerAgentName);
if (sb.isGlobalMode()) agentNames.add(ClientIdentification.yacyInternetCrawlerAgentName);
agentNames.add(ClientIdentification.googleAgentName);
if (sb.isAllIPMode()) agentNames.add(ClientIdentification.browserAgentName);
for (int i = 0; i < agentNames.size(); i++) {
prop.put("agentSelect_list_" + i + "_name", agentNames.get(i));
}
prop.put("agentSelect_list", agentNames.size());
}
prop.put("agentSelect_defaultAgentName", ClientIdentification.yacyInternetCrawlerAgentName);
// return rewrite properties // return rewrite properties
return prop; return prop;
} }

@ -34,6 +34,7 @@ import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.document.analysis.Classification.ContentDomain; import net.yacy.cora.document.analysis.Classification.ContentDomain;
import net.yacy.cora.document.encoding.ASCII; import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.document.id.DigestURL; import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.federate.yacy.CacheStrategy; import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification; import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader; import net.yacy.cora.protocol.RequestHeader;
@ -627,7 +628,7 @@ public class IndexControlRWIs_p {
filter, filter,
false, false,
null, null,
DigestURL.TLD_any_zone_filter, MultiProtocolURL.TLD_any_zone_filter,
"", "",
false, false,
sb.index, sb.index,

@ -71,7 +71,7 @@
</dt> </dt>
<dd style="width:360px;margin:0;padding:0;height:1.8em;float:left;display:inline;" id="boost_dd_#[field]#"> <dd style="width:360px;margin:0;padding:0;height:1.8em;float:left;display:inline;" id="boost_dd_#[field]#">
<input #(notinindexwarning)#::class="TableCellActive"#(/notinindexwarning)# name="boost_#[field]#" id="boost_#[field]#" type="text" align="right" size="10" value="#[boost]#" /> <input #(notinindexwarning)#::class="TableCellActive"#(/notinindexwarning)# name="boost_#[field]#" id="boost_#[field]#" type="text" align="right" size="10" value="#[boost]#" />
#(notinindexwarning)#::field not in local index (boost has no effect)#(/notinindexwarning)# #(notinindexwarning)#::field not in local index (boost has no effect)#(/notinindexwarning)#&nbsp;#[comment]#
</dd>#{/boosts}# </dd>#{/boosts}#
<dt style="width:260px;margin:0;padding:0;height:1.8em;"></dt> <dt style="width:260px;margin:0;padding:0;height:1.8em;"></dt>
<dd style="width:360px;margin:0;padding:0;height:1.8em;float:left;display:inline;"> <dd style="width:360px;margin:0;padding:0;height:1.8em;float:left;display:inline;">

@ -68,7 +68,7 @@ public class RankingSolr_p {
} }
} }
if (post != null && post.containsKey("ResetBoosts")) { if (post != null && post.containsKey("ResetBoosts")) {
String s = "url_paths_sxt^1000.0,synonyms_sxt^1.0,title^10000.0,text_t^2.0,h1_txt^1000.0,h2_txt^100.0,host_organization_s^100000.0"; String s = "url_paths_sxt^3.0,synonyms_sxt^0.5,title^5.0,text_t^1.0,host_s^6.0,h1_txt^5.0,url_file_name_tokens_t^4.0,h2_txt^3.0";
sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFIELDS_ + profileNr, s); sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFIELDS_ + profileNr, s);
sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).updateBoosts(s); sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).updateBoosts(s);
} }
@ -81,7 +81,7 @@ public class RankingSolr_p {
} }
} }
if (post != null && post.containsKey("ResetBQ")) { if (post != null && post.containsKey("ResetBQ")) {
String bq = "fuzzy_signature_unique_b:true^100000.0"; String bq = "clickdepth_i:0^0.8 clickdepth_i:1^0.4";
if (bq != null) { if (bq != null) {
sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTQUERY_ + profileNr, bq); sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTQUERY_ + profileNr, bq);
sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).setBoostQuery(bq); sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).setBoostQuery(bq);
@ -96,7 +96,7 @@ public class RankingSolr_p {
} }
} }
if (post != null && post.containsKey("ResetBF")) { if (post != null && post.containsKey("ResetBF")) {
String bf = "product(recip(rord(last_modified),1,1000,1000),div(product(log(product(references_external_i,references_exthosts_i)),div(references_internal_i,host_extent_i)),add(clickdepth_i,1)))"; String bf = "";
if (bf != null) { if (bf != null) {
sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFUNCTION_ + profileNr, bf); sb.setConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFUNCTION_ + profileNr, bf);
sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).setBoostFunction(bf); sb.index.fulltext().getDefaultConfiguration().getRanking(profileNr).setBoostFunction(bf);
@ -120,6 +120,7 @@ public class RankingSolr_p {
prop.put("boosts_" + i + "_boost", boost.toString()); prop.put("boosts_" + i + "_boost", boost.toString());
prop.put("boosts_" + i + "_notinindexwarning", (sb.index.fulltext().getDefaultConfiguration().contains(field.name())? "0" : "1") ); prop.put("boosts_" + i + "_notinindexwarning", (sb.index.fulltext().getDefaultConfiguration().contains(field.name())? "0" : "1") );
} }
prop.putHTML("boosts_" + i + "_comment", field.getComment());
i++; i++;
} }
prop.put("boosts", i); prop.put("boosts", i);

@ -84,14 +84,14 @@ To see a list of all APIs, please visit the <a href="http://www.yacy-websuche.de
</tr> </tr>
#{list}# #{list}#
<tr class="TableCell#(dark)#Light::Dark::Summary#(/dark)#" id="#[pk]#"> <tr class="TableCell#(dark)#Light::Dark::Summary#(/dark)#" id="#[pk]#">
<td align="left"><input type="checkbox" name="item_#[count]#" value="mark_#[pk]#" /></td> <td valign="top" align="left"><input type="checkbox" name="item_#[count]#" value="mark_#[pk]#" /></td>
<td>#[type]#</td> <td valign="top">#[type]##(isCrawlerStart)#::<br/><br/><a href="#[url]#" title="clone"><img src="/env/grafics/doc.gif"><img src="/env/grafics/right.gif"><img src="/env/grafics/doc.gif"></a>#(/isCrawlerStart)#</td>
<td>#[comment]#</td> <td valign="top">#[comment]#</td>
<td>#[callcount]#</td> <td valign="top">#[callcount]#</td>
<td>#[dateRecording]#</td> <td valign="top">#[dateRecording]#</td>
<td>#[dateLastExec]#</td> <td valign="top">#[dateLastExec]#</td>
<td>#[dateNextExec]#</td> <td valign="top">#[dateNextExec]#</td>
<td> <td valign="top">
#(event)# #(event)#
<select name="event_select_#[pk]#" onchange='submitchange("#[pk]#")'> <select name="event_select_#[pk]#" onchange='submitchange("#[pk]#")'>
<option value="off" selected="selected">no event</option> <option value="off" selected="selected">no event</option>
@ -137,7 +137,7 @@ To see a list of all APIs, please visit the <a href="http://www.yacy-websuche.de
</table> </table>
#(/event)# #(/event)#
</td> </td>
<td> <td valign="top">
#(scheduler)# #(scheduler)#
<select name="repeat_select_#[pk]#" onchange='submitchange("#[pk]#")'#(disabled)#:: disabled="disabled"#(/disabled)#> <select name="repeat_select_#[pk]#" onchange='submitchange("#[pk]#")'#(disabled)#:: disabled="disabled"#(/disabled)#>
<option value="off" selected="selected">no repetition</option> <option value="off" selected="selected">no repetition</option>
@ -161,7 +161,7 @@ To see a list of all APIs, please visit the <a href="http://www.yacy-websuche.de
</table> </table>
#(/scheduler)# #(/scheduler)#
</td> </td>
#(inline)#<td>#[url]#</td>::#(/inline)# #(inline)#<td valign="top">#[url]#</td>::#(/inline)#
</tr> </tr>
#{/list}# #{/list}#
</table> </table>

@ -287,6 +287,15 @@ public class Table_API_p {
prop.put("showtable_list_" + count + "_dateNextExec", date_next_exec == null ? "-" : DateFormat.getDateTimeInstance().format(date_next_exec)); prop.put("showtable_list_" + count + "_dateNextExec", date_next_exec == null ? "-" : DateFormat.getDateTimeInstance().format(date_next_exec));
prop.put("showtable_list_" + count + "_type", row.get(WorkTables.TABLE_API_COL_TYPE)); prop.put("showtable_list_" + count + "_type", row.get(WorkTables.TABLE_API_COL_TYPE));
prop.put("showtable_list_" + count + "_comment", row.get(WorkTables.TABLE_API_COL_COMMENT)); prop.put("showtable_list_" + count + "_comment", row.get(WorkTables.TABLE_API_COL_COMMENT));
// check type & action to link crawl start URLs back to CrawlStartExpert_p.html
if (prop.get("showtable_list_" + count + "_type", "").equals(WorkTables.TABLE_API_TYPE_CRAWLER)
&& prop.get("showtable_list_" + count + "_comment", "").startsWith("crawl start for")) {
prop.put("showtable_list_" + count + "_isCrawlerStart", 1);
final String editUrl = UTF8.String(row.get(WorkTables.TABLE_API_COL_URL)).replace("Crawler_p", "CrawlStartExpert_p");
prop.put("showtable_list_" + count + "_isCrawlerStart_url", editUrl);
} else {
prop.put("showtable_list_" + count + "_isCrawlerStart", 0);
}
prop.putHTML("showtable_list_" + count + "_inline_url", "http://" + sb.myPublicIP() + ":" + sb.getConfig("port", "8090") + UTF8.String(row.get(WorkTables.TABLE_API_COL_URL))); prop.putHTML("showtable_list_" + count + "_inline_url", "http://" + sb.myPublicIP() + ":" + sb.getConfig("port", "8090") + UTF8.String(row.get(WorkTables.TABLE_API_COL_URL)));
prop.put("showtable_list_" + count + "_scheduler_inline", inline ? "true" : "false"); prop.put("showtable_list_" + count + "_scheduler_inline", inline ? "true" : "false");
prop.put("showtable_list_" + count + "_scheduler_filter", typefilter.pattern()); prop.put("showtable_list_" + count + "_scheduler_filter", typefilter.pattern());

Binary file not shown.

Before

Width:  |  Height:  |  Size: 141 B

After

Width:  |  Height:  |  Size: 76 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 73 B

@ -40,7 +40,7 @@ import net.yacy.cora.document.analysis.Classification;
import net.yacy.cora.document.analysis.Classification.ContentDomain; import net.yacy.cora.document.analysis.Classification.ContentDomain;
import net.yacy.cora.document.encoding.ASCII; import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.document.feed.RSSMessage; import net.yacy.cora.document.feed.RSSMessage;
import net.yacy.cora.document.id.DigestURL; import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.lod.vocabulary.Tagging; import net.yacy.cora.lod.vocabulary.Tagging;
import net.yacy.cora.protocol.Domains; import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework; import net.yacy.cora.protocol.HeaderFramework;
@ -241,7 +241,7 @@ public final class search {
null, null,
false, false,
null, null,
DigestURL.TLD_any_zone_filter, MultiProtocolURL.TLD_any_zone_filter,
client, client,
false, false,
indexSegment, indexSegment,
@ -306,7 +306,7 @@ public final class search {
constraint, constraint,
false, false,
null, null,
DigestURL.TLD_any_zone_filter, MultiProtocolURL.TLD_any_zone_filter,
client, client,
false, false,
sb.index, sb.index,

@ -45,6 +45,7 @@ import net.yacy.cora.document.analysis.Classification.ContentDomain;
import net.yacy.cora.document.encoding.UTF8; import net.yacy.cora.document.encoding.UTF8;
import net.yacy.cora.document.feed.RSSMessage; import net.yacy.cora.document.feed.RSSMessage;
import net.yacy.cora.document.id.DigestURL; import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.federate.opensearch.OpenSearchConnector; import net.yacy.cora.federate.opensearch.OpenSearchConnector;
import net.yacy.cora.federate.yacy.CacheStrategy; import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.geo.GeoLocation; import net.yacy.cora.geo.GeoLocation;
@ -658,7 +659,7 @@ public class yacysearch {
constraint, constraint,
true, true,
DigestURL.hosthashess(sb.getConfig("search.excludehosth", "")), DigestURL.hosthashess(sb.getConfig("search.excludehosth", "")),
DigestURL.TLD_any_zone_filter, MultiProtocolURL.TLD_any_zone_filter,
client, client,
authenticated, authenticated,
indexSegment, indexSegment,
@ -669,7 +670,7 @@ public class yacysearch {
&& sb.peers.mySeed().getFlagAcceptRemoteIndex(), && sb.peers.mySeed().getFlagAcceptRemoteIndex(),
false, false,
lat, lon, rad, lat, lon, rad,
sb.getConfig("search_navigation","").split(",")); sb.getConfig("search.navigation","").split(","));
EventTracker.delete(EventTracker.EClass.SEARCH); EventTracker.delete(EventTracker.EClass.SEARCH);
EventTracker.update(EventTracker.EClass.SEARCH, new ProfilingGraph.EventSearch( EventTracker.update(EventTracker.EClass.SEARCH, new ProfilingGraph.EventSearch(
theQuery.id(true), theQuery.id(true),

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

@ -185,7 +185,7 @@ public class RSSReader extends DefaultHandler {
} }
this.item = new RSSMessage(); this.item = new RSSMessage();
this.parsingItem = true; this.parsingItem = true;
} else if (this.parsingItem && this.type == Type.atom && "link".equals(tag) && (atts.getValue("type") == null || atts.getValue("type").startsWith("text"))) { } else if (this.parsingItem && this.type == Type.atom && "link".equals(tag) && (atts.getValue("type") == null || this.item.getLink().length() == 0 || atts.getValue("type").startsWith("text") || atts.getValue("type").equals("application/xhtml+xml"))) {
final String url = atts.getValue("href"); final String url = atts.getValue("href");
if (url != null && url.length() > 0) this.item.setValue(Token.link, url); if (url != null && url.length() > 0) this.item.setValue(Token.link, url);
} else if ("image".equals(tag) || (this.parsingItem && this.type == Type.atom && "link".equals(tag) && (atts.getValue("type") == null || atts.getValue("type").startsWith("image")))) { } else if ("image".equals(tag) || (this.parsingItem && this.type == Type.atom && "link".equals(tag) && (atts.getValue("type") == null || atts.getValue("type").startsWith("image")))) {

@ -836,7 +836,7 @@ public class MultiProtocolURL implements Serializable, Comparable<MultiProtocolU
* resulting words are not ordered by appearance, but all * resulting words are not ordered by appearance, but all
* @return * @return
*/ */
private static String toTokens(final String s) { public static String toTokens(final String s) {
// remove all non-character & non-number // remove all non-character & non-number
final StringBuilder sb = new StringBuilder(s.length()); final StringBuilder sb = new StringBuilder(s.length());
char c; char c;
@ -854,18 +854,18 @@ public class MultiProtocolURL implements Serializable, Comparable<MultiProtocolU
// split the string into tokens and add all camel-case splitting // split the string into tokens and add all camel-case splitting
final String[] u = CommonPattern.SPACE.split(t); final String[] u = CommonPattern.SPACE.split(t);
final Set<String> token = new LinkedHashSet<String>(); final Set<String> token = new LinkedHashSet<String>();
for (final String r: u) { for (final String r: u) token.add(r);
token.addAll(parseCamelCase(r)); for (final String r: u) token.addAll(parseCamelCase(r));
}
// construct a String again // construct a String again
for (final String v: token) if (v.length() > 1) t += ' ' + v; sb.setLength(0);
return t; for (final String v: token) if (v.length() > 1) sb.append(v).append(' ');
return sb.length() == 0 ? "" : sb.substring(0, sb.length() - 1);
} }
public static enum CharType { low, high, number; } public static enum CharType { low, high, number; }
public static Set<String> parseCamelCase(String s) { private static Set<String> parseCamelCase(String s) {
final Set<String> token = new LinkedHashSet<String>(); final Set<String> token = new LinkedHashSet<String>();
if (s.isEmpty()) return token; if (s.isEmpty()) return token;
int p = 0; int p = 0;

@ -60,6 +60,7 @@ import net.yacy.cora.util.ConcurrentLog;
import com.google.common.net.InetAddresses; import com.google.common.net.InetAddresses;
import com.google.common.util.concurrent.SimpleTimeLimiter; import com.google.common.util.concurrent.SimpleTimeLimiter;
import com.google.common.util.concurrent.TimeLimiter; import com.google.common.util.concurrent.TimeLimiter;
import com.google.common.util.concurrent.UncheckedTimeoutException;
public class Domains { public class Domains {
@ -776,7 +777,7 @@ public class Domains {
ip = null; ip = null;
} }
} }
if (ip == null) { if (ip == null) try {
ip = timeLimiter.callWithTimeout(new Callable<InetAddress>() { ip = timeLimiter.callWithTimeout(new Callable<InetAddress>() {
@Override @Override
public InetAddress call() throws Exception { public InetAddress call() throws Exception {
@ -784,6 +785,10 @@ public class Domains {
} }
}, 3000L, TimeUnit.MILLISECONDS, false); }, 3000L, TimeUnit.MILLISECONDS, false);
//ip = TimeoutRequest.getByName(host, 1000); // this makes the DNS request to backbone //ip = TimeoutRequest.getByName(host, 1000); // this makes the DNS request to backbone
} catch (final UncheckedTimeoutException e) {
// in case of a timeout - maybe cause of massive requests - do not fill NAME_CACHE_MISS
LOOKUP_SYNC.remove(host);
return null;
} }
//.out.println("DNSLOOKUP-*LOOKUP* " + host + ", time = " + (System.currentTimeMillis() - t) + "ms"); //.out.println("DNSLOOKUP-*LOOKUP* " + host + ", time = " + (System.currentTimeMillis() - t) + "ms");
} catch (final Throwable e) { } catch (final Throwable e) {

@ -51,8 +51,6 @@ import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework; import net.yacy.cora.protocol.HeaderFramework;
import org.apache.http.Header; import org.apache.http.Header;
import org.apache.http.HeaderElement;
import org.apache.http.HeaderElementIterator;
import org.apache.http.HttpEntity; import org.apache.http.HttpEntity;
import org.apache.http.HttpEntityEnclosingRequest; import org.apache.http.HttpEntityEnclosingRequest;
import org.apache.http.HttpHost; import org.apache.http.HttpHost;
@ -79,10 +77,9 @@ import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.mime.MultipartEntityBuilder; import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.ContentBody; import org.apache.http.entity.mime.content.ContentBody;
import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.DefaultConnectionKeepAliveStrategy;
import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicHeaderElementIterator;
import org.apache.http.protocol.HTTP; import org.apache.http.protocol.HTTP;
import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpContext;
import org.apache.http.util.ByteArrayBuffer; import org.apache.http.util.ByteArrayBuffer;
@ -360,7 +357,6 @@ public class HTTPClient {
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
throw new IOException(e.getMessage()); // can be caused at java.net.URI.create() throw new IOException(e.getMessage()); // can be caused at java.net.URI.create()
} }
httpGet.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
if (!localhost) setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service if (!localhost) setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
return getContentBytes(httpGet, maxBytes); return getContentBytes(httpGet, maxBytes);
} }
@ -383,7 +379,6 @@ public class HTTPClient {
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
throw new IOException(e.getMessage()); // can be caused at java.net.URI.create() throw new IOException(e.getMessage()); // can be caused at java.net.URI.create()
} }
httpGet.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
this.currentRequest = httpGet; this.currentRequest = httpGet;
execute(httpGet); execute(httpGet);
@ -399,7 +394,6 @@ public class HTTPClient {
public HttpResponse HEADResponse(final String uri) throws IOException { public HttpResponse HEADResponse(final String uri) throws IOException {
final MultiProtocolURL url = new MultiProtocolURL(uri); final MultiProtocolURL url = new MultiProtocolURL(uri);
final HttpHead httpHead = new HttpHead(url.toNormalform(true)); final HttpHead httpHead = new HttpHead(url.toNormalform(true));
httpHead.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service setHost(url.getHost()); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
execute(httpHead); execute(httpHead);
finish(); finish();
@ -421,7 +415,6 @@ public class HTTPClient {
if (this.currentRequest != null) throw new IOException("Client is in use!"); if (this.currentRequest != null) throw new IOException("Client is in use!");
final MultiProtocolURL url = new MultiProtocolURL(uri); final MultiProtocolURL url = new MultiProtocolURL(uri);
final HttpPost httpPost = new HttpPost(url.toNormalform(true)); final HttpPost httpPost = new HttpPost(url.toNormalform(true));
httpPost.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
String host = url.getHost(); String host = url.getHost();
if (host == null) host = Domains.LOCALHOST; if (host == null) host = Domains.LOCALHOST;
setHost(host); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service setHost(host); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
@ -458,7 +451,6 @@ public class HTTPClient {
*/ */
public byte[] POSTbytes(final MultiProtocolURL url, final String vhost, final Map<String, ContentBody> post, final boolean usegzip) throws IOException { public byte[] POSTbytes(final MultiProtocolURL url, final String vhost, final Map<String, ContentBody> post, final boolean usegzip) throws IOException {
final HttpPost httpPost = new HttpPost(url.toNormalform(true)); final HttpPost httpPost = new HttpPost(url.toNormalform(true));
httpPost.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
setHost(vhost); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service setHost(vhost); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
if (vhost == null) setHost(Domains.LOCALHOST); if (vhost == null) setHost(Domains.LOCALHOST);
@ -491,7 +483,6 @@ public class HTTPClient {
public byte[] POSTbytes(final String uri, final InputStream instream, final long length) throws IOException { public byte[] POSTbytes(final String uri, final InputStream instream, final long length) throws IOException {
final MultiProtocolURL url = new MultiProtocolURL(uri); final MultiProtocolURL url = new MultiProtocolURL(uri);
final HttpPost httpPost = new HttpPost(url.toNormalform(true)); final HttpPost httpPost = new HttpPost(url.toNormalform(true));
httpPost.addHeader(new BasicHeader("Connection", "close")); // don't keep alive, prevent CLOSE_WAIT state
String host = url.getHost(); String host = url.getHost();
if (host == null) host = Domains.LOCALHOST; if (host == null) host = Domains.LOCALHOST;
setHost(host); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service setHost(host); // overwrite resolved IP, needed for shared web hosting DO NOT REMOVE, see http://en.wikipedia.org/wiki/Shared_web_hosting_service
@ -536,6 +527,7 @@ public class HTTPClient {
ConnectionInfo.removeConnection(this.currentRequest.hashCode()); ConnectionInfo.removeConnection(this.currentRequest.hashCode());
this.currentRequest.abort(); this.currentRequest.abort();
this.currentRequest = null; this.currentRequest = null;
this.httpResponse.close();
throw e; throw e;
} }
} }
@ -563,6 +555,7 @@ public class HTTPClient {
ConnectionInfo.removeConnection(this.currentRequest.hashCode()); ConnectionInfo.removeConnection(this.currentRequest.hashCode());
this.currentRequest.abort(); this.currentRequest.abort();
this.currentRequest = null; this.currentRequest = null;
this.httpResponse.close();
throw e; throw e;
} }
} }
@ -577,10 +570,11 @@ public class HTTPClient {
public void finish() throws IOException { public void finish() throws IOException {
if (this.httpResponse != null) { if (this.httpResponse != null) {
final HttpEntity httpEntity = this.httpResponse.getEntity(); final HttpEntity httpEntity = this.httpResponse.getEntity();
if (httpEntity != null && httpEntity.isStreaming()) { if (httpEntity != null && httpEntity.isStreaming()) {
// Ensures that the entity content is fully consumed and the content stream, if exists, is closed. // Ensures that the entity content is fully consumed and the content stream, if exists, is closed.
EntityUtils.consume(httpEntity); EntityUtils.consume(httpEntity);
} }
this.httpResponse.close();
} }
if (this.currentRequest != null) { if (this.currentRequest != null) {
ConnectionInfo.removeConnection(this.currentRequest.hashCode()); ConnectionInfo.removeConnection(this.currentRequest.hashCode());
@ -607,6 +601,7 @@ public class HTTPClient {
httpUriRequest.abort(); httpUriRequest.abort();
throw e; throw e;
} finally { } finally {
if (this.httpResponse != null) this.httpResponse.close();
ConnectionInfo.removeConnection(httpUriRequest.hashCode()); ConnectionInfo.removeConnection(httpUriRequest.hashCode());
} }
} }
@ -639,6 +634,7 @@ public class HTTPClient {
} catch (final IOException e) { } catch (final IOException e) {
ConnectionInfo.removeConnection(httpUriRequest.hashCode()); ConnectionInfo.removeConnection(httpUriRequest.hashCode());
httpUriRequest.abort(); httpUriRequest.abort();
if (this.httpResponse != null) this.httpResponse.close();
throw new IOException("Client can't execute: " throw new IOException("Client can't execute: "
+ (e.getCause() == null ? e.getMessage() : e.getCause().getMessage()) + (e.getCause() == null ? e.getMessage() : e.getCause().getMessage())
+ " duration=" + Long.toString(System.currentTimeMillis() - time)); + " duration=" + Long.toString(System.currentTimeMillis() - time));
@ -681,6 +677,7 @@ public class HTTPClient {
httpUriRequest.setHeader(HTTP.TARGET_HOST, this.host); httpUriRequest.setHeader(HTTP.TARGET_HOST, this.host);
if (this.realm != null) if (this.realm != null)
httpUriRequest.setHeader("Authorization", "realm=" + this.realm); httpUriRequest.setHeader("Authorization", "realm=" + this.realm);
httpUriRequest.setHeader("Connection", "close"); // don't keep alive, prevent CLOSE_WAIT state
} }
private void storeConnectionInfo(final HttpUriRequest httpUriRequest) { private void storeConnectionInfo(final HttpUriRequest httpUriRequest) {
@ -734,35 +731,25 @@ public class HTTPClient {
/** /**
* If the Keep-Alive header is not present in the response, * If the Keep-Alive header is not present in the response,
* HttpClient assumes the connection can be kept alive indefinitely. * HttpClient assumes the connection can be kept alive indefinitely.
* Here we limit this to 5 seconds. * Here we limit this to 5 seconds if unset and to a max of 25 seconds
* *
* @param defaultHttpClient * @param defaultHttpClient
*/ */
private static ConnectionKeepAliveStrategy customKeepAliveStrategy() { private static ConnectionKeepAliveStrategy customKeepAliveStrategy() {
return new ConnectionKeepAliveStrategy() { return new DefaultConnectionKeepAliveStrategy() {
@Override @Override
public long getKeepAliveDuration(HttpResponse response, HttpContext context) { public long getKeepAliveDuration(HttpResponse response,
// Honor 'keep-alive' header HttpContext context) {
String param, value; long keepAlive = super.getKeepAliveDuration(response, context);
HeaderElement element; if (keepAlive < 1) {
HeaderElementIterator it = new BasicHeaderElementIterator( // Keep connections alive 5 seconds if a keep-alive value
response.headerIterator(HTTP.CONN_KEEP_ALIVE)); // has not be explicitly set by the server
while (it.hasNext()) { keepAlive = 5000;
element = it.nextElement(); }
param = element.getName(); return Math.min(keepAlive, 25000);
value = element.getValue();
if (value != null && param.equalsIgnoreCase("timeout")) {
try {
return Long.parseLong(value) * 1000;
} catch(final NumberFormatException e) {
}
}
}
// Keep alive for 5 seconds only
return 5 * 1000;
} }
}; };
} }
/** /**
* testing * testing

@ -133,7 +133,7 @@ public final class CrawlStacker {
// this is the method that is called by the busy thread from outside // this is the method that is called by the busy thread from outside
if (entry == null) return null; if (entry == null) return null;
// record the link graph for this request // record the link graph for this request; this can be overwritten, replaced and enhanced by an index writing process in Segment.storeDocument
byte[] anchorhash = entry.url().hash(); byte[] anchorhash = entry.url().hash();
IndexCell<CitationReference> urlCitationIndex = this.indexSegment.urlCitation(); IndexCell<CitationReference> urlCitationIndex = this.indexSegment.urlCitation();
if (urlCitationIndex != null && entry.referrerhash() != null) try { if (urlCitationIndex != null && entry.referrerhash() != null) try {

@ -44,18 +44,28 @@ public class DidYouMean {
'\u00e8','\u00e9','\u00ea','\u00eb','\u00ec','\u00ed','\u00ee','\u00ef', '\u00e8','\u00e9','\u00ea','\u00eb','\u00ec','\u00ed','\u00ee','\u00ef',
'\u00f0','\u00f1','\u00f2','\u00f3','\u00f4','\u00f5','\u00f6', '\u00f0','\u00f1','\u00f2','\u00f3','\u00f4','\u00f5','\u00f6',
'\u00f8','\u00f9','\u00fa','\u00fb','\u00fc','\u00fd','\u00fe','\u00ff'}; '\u00f8','\u00f9','\u00fa','\u00fb','\u00fc','\u00fd','\u00fe','\u00ff'};
private static final char[] ALPHABET_KANJI = new char[512]; private static final char[] ALPHABET_KANJI = new char[512]; // \u3400-\u34ff + \u4e00-\u4eff
private static final char[] ALPHABET_HIRAGANA = new char[96]; // \u3040-\u309F
private static final char[] ALPHABET_KATAKANA = new char[96]; // \u30A0-\u30FF
private static final char[] ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part1 = new char[5376]; // \u4E00-\u62FF
private static final char[] ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part2 = new char[5376]; // \u6300-\u77FF
private static final char[] ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part3 = new char[5376]; // \u7800-\u8CFF
private static final char[] ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part4 = new char[4864]; // \u8D00-\u9FFF
static { static {
// this is very experimental: a very small subset of Kanji // this is very experimental: a very small subset of Kanji
for (char a = '\u3400'; a <= '\u34ff'; a++) { for (char a = '\u3400'; a <= '\u34ff'; a++) ALPHABET_KANJI[0xff & (a - '\u3400')] = a;
ALPHABET_KANJI[0xff & (a - '\u3400')] = a; for (char a = '\u4e00'; a <= '\u4eff'; a++) ALPHABET_KANJI[0xff & (a - '\u4e00') + 256] = a;
} for (char a = '\u3040'; a <= '\u309F'; a++) ALPHABET_HIRAGANA[0xff & (a - '\u3040')] = a;
for (char a = '\u4e00'; a <= '\u4eff'; a++) { for (char a = '\u30A0'; a <= '\u30FF'; a++) ALPHABET_KATAKANA[0xff & (a - '\u30A0')] = a;
ALPHABET_KANJI[0xff & (a - '\u4e00') + 256] = a; for (char a = '\u4E00'; a <= '\u62FF'; a++) ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part1[0xff & (a - '\u4E00')] = a;
} for (char a = '\u6300'; a <= '\u77FF'; a++) ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part2[0xff & (a - '\u6300')] = a;
for (char a = '\u7800'; a <= '\u8CFF'; a++) ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part3[0xff & (a - '\u7800')] = a;
for (char a = '\u8D00'; a <= '\u9FFF'; a++) ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part4[0xff & (a - '\u8D00')] = a;
} }
private static final char[][] ALPHABETS = {ALPHABET_LATIN, ALPHABET_KANJI}; private static final char[][] ALPHABETS = {
ALPHABET_LATIN, ALPHABET_KANJI, ALPHABET_HIRAGANA, ALPHABET_KATAKANA,
ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part1, ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part2, ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part3, ALPHABET_CJK_UNIFIED_IDEOGRAPHS_Part4};
private static final StringBuilder POISON_STRING = new StringBuilder("\n"); private static final StringBuilder POISON_STRING = new StringBuilder("\n");
public static final int AVAILABLE_CPU = Runtime.getRuntime().availableProcessors(); public static final int AVAILABLE_CPU = Runtime.getRuntime().availableProcessors();
private static final wordLengthComparator WORD_LENGTH_COMPARATOR = new wordLengthComparator(); private static final wordLengthComparator WORD_LENGTH_COMPARATOR = new wordLengthComparator();
@ -92,11 +102,16 @@ public class DidYouMean {
alphatest: for (final char[] alpha: ALPHABETS) { alphatest: for (final char[] alpha: ALPHABETS) {
if (isAlphabet(alpha, testchar)) { if (isAlphabet(alpha, testchar)) {
this.alphabet = new char[alpha.length]; this.alphabet = new char[alpha.length];
System.arraycopy(ALPHABET_LATIN, 0, this.alphabet, 0, alpha.length); System.arraycopy(alpha, 0, this.alphabet, 0, alpha.length);
alphafound = true; alphafound = true;
break alphatest; break alphatest;
} }
} }
if (!alphafound && testchar < 'A') {
this.alphabet = new char[ALPHABET_LATIN.length];
System.arraycopy(ALPHABET_LATIN, 0, this.alphabet, 0, ALPHABET_LATIN.length);
alphafound = true;
}
if (!alphafound) { if (!alphafound) {
// generate generic alphabet using simply a character block of 256 characters // generate generic alphabet using simply a character block of 256 characters
final int firstchar = (0xff & (testchar / 256)) * 256; final int firstchar = (0xff & (testchar / 256)) * 256;

@ -469,10 +469,19 @@ public final class Switchboard extends serverSwitch {
for (int i = 0; i <= 3; i++) { for (int i = 0; i <= 3; i++) {
// must be done every time the boosts change // must be done every time the boosts change
Ranking r = solrCollectionConfigurationWork.getRanking(i); Ranking r = solrCollectionConfigurationWork.getRanking(i);
r.setName(this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTNAME_ + i, "_dummy" + i)); String name = this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTNAME_ + i, "_dummy" + i);
r.updateBoosts(this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFIELDS_ + i, "text_t^1.0")); String boosts = this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFIELDS_ + i, "text_t^1.0");
r.setBoostQuery(this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTQUERY_ + i, "")); String bq = this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTQUERY_ + i, "");
r.setBoostFunction(this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFUNCTION_ + i, "")); String bf = this.getConfig(SwitchboardConstants.SEARCH_RANKING_SOLR_COLLECTION_BOOSTFUNCTION_ + i, "");
// apply some hard-coded patches for earlier experiments we do not want any more
if (bf.equals("product(recip(rord(last_modified),1,1000,1000),div(product(log(product(references_external_i,references_exthosts_i)),div(references_internal_i,host_extent_i)),add(clickdepth_i,1)))") ||
bf.equals("scale(cr_host_norm_i,1,20)")) bf = "";
if (i == 0 && bq.equals("fuzzy_signature_unique_b:true^100000.0")) bq = "clickdepth_i:0^0.8 clickdepth_i:1^0.4";
if (boosts.equals("url_paths_sxt^1000.0,synonyms_sxt^1.0,title^10000.0,text_t^2.0,h1_txt^1000.0,h2_txt^100.0,host_organization_s^100000.0")) boosts = "url_paths_sxt^3.0,synonyms_sxt^0.5,title^5.0,text_t^1.0,host_s^6.0,h1_txt^5.0,url_file_name_tokens_t^4.0,h2_txt^2.0";
r.setName(name);
r.updateBoosts(boosts);
r.setBoostQuery(bq);
r.setBoostFunction(bf);
} }
// initialize index // initialize index

@ -81,12 +81,11 @@ import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrException; import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument; import org.apache.solr.common.SolrInputDocument;
import org.apache.lucene.util.Version; import org.apache.lucene.util.Version;
import org.apache.solr.core.CoreContainer;
public final class Fulltext { public final class Fulltext {
private static final String SOLR_PATH = "solr_44"; // the number should be identical to the number in the property luceneMatchVersion in solrconfig.xml private static final String SOLR_PATH = "solr_45"; // the number should be identical to the number in the property luceneMatchVersion in solrconfig.xml
private static final String SOLR_OLD_PATH[] = new String[]{"solr_36", "solr_40"}; private static final String SOLR_OLD_PATH[] = new String[]{"solr_36", "solr_40", "solr_44"};
// class objects // class objects
private final File segmentPath; private final File segmentPath;
@ -167,7 +166,6 @@ public final class Fulltext {
File oldLocation = new File(this.segmentPath, oldVersion); File oldLocation = new File(this.segmentPath, oldVersion);
if (oldLocation.exists()) oldLocation.renameTo(solrLocation); if (oldLocation.exists()) oldLocation.renameTo(solrLocation);
} }
assert CoreContainer.DEFAULT_DEFAULT_CORE_NAME.equals(CollectionSchema.CORE_NAME); // check that solr and we use the same default core name
EmbeddedInstance localCollectionInstance = new EmbeddedInstance(new File(new File(Switchboard.getSwitchboard().appPath, "defaults"), "solr"), solrLocation, CollectionSchema.CORE_NAME, new String[]{CollectionSchema.CORE_NAME, WebgraphSchema.CORE_NAME}); EmbeddedInstance localCollectionInstance = new EmbeddedInstance(new File(new File(Switchboard.getSwitchboard().appPath, "defaults"), "solr"), solrLocation, CollectionSchema.CORE_NAME, new String[]{CollectionSchema.CORE_NAME, WebgraphSchema.CORE_NAME});
EmbeddedSolrConnector localCollectionConnector = new EmbeddedSolrConnector(localCollectionInstance); EmbeddedSolrConnector localCollectionConnector = new EmbeddedSolrConnector(localCollectionInstance);

@ -32,6 +32,7 @@ import java.net.MalformedURLException;
import java.util.Collection; import java.util.Collection;
import java.util.Date; import java.util.Date;
import java.util.Iterator; import java.util.Iterator;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.TreeMap; import java.util.TreeMap;
@ -43,6 +44,7 @@ import org.apache.solr.common.SolrInputDocument;
import net.yacy.cora.document.encoding.ASCII; import net.yacy.cora.document.encoding.ASCII;
import net.yacy.cora.document.encoding.UTF8; import net.yacy.cora.document.encoding.UTF8;
import net.yacy.cora.document.id.AnchorURL;
import net.yacy.cora.document.id.DigestURL; import net.yacy.cora.document.id.DigestURL;
import net.yacy.cora.document.id.MultiProtocolURL; import net.yacy.cora.document.id.MultiProtocolURL;
import net.yacy.cora.federate.solr.connector.AbstractSolrConnector; import net.yacy.cora.federate.solr.connector.AbstractSolrConnector;
@ -422,6 +424,7 @@ public class Segment {
try { try {
return (int) this.fulltext.getDefaultConnector().getCountByQuery(CollectionSchema.text_t.getSolrFieldName() + ":\"" + word + "\""); return (int) this.fulltext.getDefaultConnector().getCountByQuery(CollectionSchema.text_t.getSolrFieldName() + ":\"" + word + "\"");
} catch (final Throwable e) { } catch (final Throwable e) {
ConcurrentLog.warn("Segment", "problem with word guess for word: " + word);
ConcurrentLog.logException(e); ConcurrentLog.logException(e);
return 0; return 0;
} }
@ -619,7 +622,7 @@ public class Segment {
char docType = Response.docType(document.dc_format()); char docType = Response.docType(document.dc_format());
// CREATE SOLR DOCUMENT // CREATE SOLR DOCUMENT
final CollectionConfiguration.SolrVector vector = this.fulltext.getDefaultConfiguration().yacy2solr(collections, responseHeader, document, condenser, referrerURL, language, urlCitationIndex, this.fulltext.getWebgraphConfiguration(), sourceName); final CollectionConfiguration.SolrVector vector = this.fulltext.getDefaultConfiguration().yacy2solr(collections, responseHeader, document, condenser, referrerURL, language, this.fulltext.getWebgraphConfiguration(), sourceName);
// ENRICH DOCUMENT WITH RANKING INFORMATION // ENRICH DOCUMENT WITH RANKING INFORMATION
if (this.connectedCitation()) { if (this.connectedCitation()) {
@ -628,21 +631,45 @@ public class Segment {
// STORE TO SOLR // STORE TO SOLR
String error = null; String error = null;
this.putDocumentInQueue(vector); this.putDocumentInQueue(vector);
if (this.fulltext.writeToWebgraph()) { List<SolrInputDocument> webgraph = vector.getWebgraphDocuments();
tryloop: for (int i = 0; i < 20; i++) { if (webgraph != null && webgraph.size() > 0) {
try {
error = null; // write the edges to the webgraph solr index
this.fulltext.putEdges(vector.getWebgraphDocuments()); if (this.fulltext.writeToWebgraph()) {
break tryloop; tryloop: for (int i = 0; i < 20; i++) {
} catch (final IOException e ) { try {
error = "failed to send " + urlNormalform + " to solr: " + e.getMessage(); error = null;
ConcurrentLog.warn("SOLR", error); this.fulltext.putEdges(webgraph);
if (i == 10) this.fulltext.commit(true); break tryloop;
try {Thread.sleep(1000);} catch (final InterruptedException e1) {} } catch (final IOException e ) {
continue tryloop; error = "failed to send " + urlNormalform + " to solr: " + e.getMessage();
ConcurrentLog.warn("SOLR", error);
if (i == 10) this.fulltext.commit(true);
try {Thread.sleep(1000);} catch (final InterruptedException e1) {}
continue tryloop;
}
}
}
// write the edges to the citation reference index
if (this.connectedCitation()) try {
// normal links
for (SolrInputDocument edge: webgraph) {
String referrerhash = (String) edge.getFieldValue(WebgraphSchema.source_id_s.getSolrFieldName());
String anchorhash = (String) edge.getFieldValue(WebgraphSchema.target_id_s.getSolrFieldName());
if (referrerhash != null && anchorhash != null) {
urlCitationIndex.add(ASCII.getBytes(anchorhash), new CitationReference(ASCII.getBytes(referrerhash), loadDate.getTime()));
}
} }
// media links as well!
for (AnchorURL image: document.getImages().keySet()) urlCitationIndex.add(image.hash(), new CitationReference(url.hash(), loadDate.getTime()));
for (AnchorURL audio: document.getAudiolinks().keySet()) urlCitationIndex.add(audio.hash(), new CitationReference(url.hash(), loadDate.getTime()));
for (AnchorURL video: document.getVideolinks().keySet()) urlCitationIndex.add(video.hash(), new CitationReference(url.hash(), loadDate.getTime()));
} catch (Throwable e) {
ConcurrentLog.logException(e);
} }
} }
if (error != null) { if (error != null) {
ConcurrentLog.severe("SOLR", error + ", PLEASE REPORT TO bugs.yacy.net"); ConcurrentLog.severe("SOLR", error + ", PLEASE REPORT TO bugs.yacy.net");
//Switchboard.getSwitchboard().pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL, error); //Switchboard.getSwitchboard().pauseCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL, error);

@ -26,7 +26,6 @@
package net.yacy.search.query; package net.yacy.search.query;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
@ -133,60 +132,6 @@ public final class QueryParams {
private SolrQuery cachedQuery; private SolrQuery cachedQuery;
private CollectionConfiguration solrSchema; private CollectionConfiguration solrSchema;
public QueryParams(
final String query_original,
final String query_words,
final int itemsPerPage,
final Bitfield constraint,
final Segment indexSegment,
final RankingProfile ranking,
final String userAgent,
final String[] search_navigation) {
this.queryGoal = new QueryGoal(query_original, query_words);
this.ranking = ranking;
this.modifier = new QueryModifier();
this.maxDistance = Integer.MAX_VALUE;
this.urlMask = catchall_pattern;
this.urlMask_isCatchall = true;
this.tld = null;
this.inlink = null;
this.prefer = matchnothing_pattern;
this.contentdom = ContentDomain.ALL;
this.itemsPerPage = itemsPerPage;
this.offset = 0;
this.targetlang = "en";
this.metatags = new ArrayList<Tagging.Metatag>(0);
this.domType = Searchdom.LOCAL;
this.zonecode = DigestURL.TLD_any_zone_filter;
this.constraint = constraint;
this.allofconstraint = false;
this.snippetCacheStrategy = null;
this.clienthost = null;
this.siteexcludes = null;
this.remotepeer = null;
this.starttime = Long.valueOf(System.currentTimeMillis());
this.maxtime = 10000;
this.timeout = this.starttime + this.timeout;
this.indexSegment = indexSegment;
this.userAgent = userAgent;
this.transmitcount = 0;
this.filterfailurls = false;
this.filterscannerfail = false;
this.lat = 0.0d;
this.lon = 0.0d;
this.radius = 0.0d;
this.facetfields = new LinkedHashSet<String>();
this.solrSchema = indexSegment.fulltext().getDefaultConfiguration();
for (String navkey: search_navigation) {
CollectionSchema f = defaultfacetfields.get(navkey);
if (f != null && solrSchema.contains(f)) facetfields.add(f.getSolrFieldName());
}
for (Tagging v: LibraryProvider.autotagging.getVocabularies()) this.facetfields.add(CollectionSchema.VOCABULARY_PREFIX + v.getName() + CollectionSchema.VOCABULARY_SUFFIX);
this.maxfacets = defaultmaxfacets;
this.cachedQuery = null;
}
public QueryParams( public QueryParams(
final QueryGoal queryGoal, final QueryGoal queryGoal,
final QueryModifier modifier, final QueryModifier modifier,

@ -75,7 +75,7 @@ import net.yacy.document.parser.html.ImageEntry;
import net.yacy.kelondro.data.citation.CitationReference; import net.yacy.kelondro.data.citation.CitationReference;
import net.yacy.kelondro.data.meta.URIMetadataRow; import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.index.RowHandleMap; import net.yacy.kelondro.index.RowHandleMap;
import net.yacy.kelondro.rwi.IndexCell; import net.yacy.kelondro.rwi.ReferenceContainer;
import net.yacy.kelondro.util.Bitfield; import net.yacy.kelondro.util.Bitfield;
import net.yacy.search.index.Segment; import net.yacy.search.index.Segment;
import net.yacy.search.index.Segment.ReferenceReport; import net.yacy.search.index.Segment.ReferenceReport;
@ -211,10 +211,12 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
// path elements of link // path elements of link
String filename = digestURL.getFileName(); String filename = digestURL.getFileName();
String extension = MultiProtocolURL.getFileExtension(filename); String extension = MultiProtocolURL.getFileExtension(filename);
String filenameStub = filename.toLowerCase().endsWith("." + extension) ? filename.substring(0, filename.length() - extension.length() - 1) : filename;
if (allAttr || contains(CollectionSchema.url_chars_i)) add(doc, CollectionSchema.url_chars_i, us.length()); if (allAttr || contains(CollectionSchema.url_chars_i)) add(doc, CollectionSchema.url_chars_i, us.length());
if (allAttr || contains(CollectionSchema.url_protocol_s)) add(doc, CollectionSchema.url_protocol_s, digestURL.getProtocol()); if (allAttr || contains(CollectionSchema.url_protocol_s)) add(doc, CollectionSchema.url_protocol_s, digestURL.getProtocol());
if (allAttr || contains(CollectionSchema.url_paths_sxt)) add(doc, CollectionSchema.url_paths_sxt, digestURL.getPaths()); if (allAttr || contains(CollectionSchema.url_paths_sxt)) add(doc, CollectionSchema.url_paths_sxt, digestURL.getPaths());
if (allAttr || contains(CollectionSchema.url_file_name_s)) add(doc, CollectionSchema.url_file_name_s, filename.toLowerCase().endsWith("." + extension) ? filename.substring(0, filename.length() - extension.length() - 1) : filename); if (allAttr || contains(CollectionSchema.url_file_name_s)) add(doc, CollectionSchema.url_file_name_s, filenameStub);
if (allAttr || contains(CollectionSchema.url_file_name_tokens_t)) add(doc, CollectionSchema.url_file_name_tokens_t, MultiProtocolURL.toTokens(filenameStub));
if (allAttr || contains(CollectionSchema.url_file_ext_s)) add(doc, CollectionSchema.url_file_ext_s, extension); if (allAttr || contains(CollectionSchema.url_file_ext_s)) add(doc, CollectionSchema.url_file_ext_s, extension);
if (allAttr || contains(CollectionSchema.content_type)) add(doc, CollectionSchema.content_type, Response.doctype2mime(extension, doctype)); if (allAttr || contains(CollectionSchema.content_type)) add(doc, CollectionSchema.content_type, Response.doctype2mime(extension, doctype));
@ -339,7 +341,6 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
public SolrVector yacy2solr( public SolrVector yacy2solr(
final Map<String, Pattern> collections, final ResponseHeader responseHeader, final Map<String, Pattern> collections, final ResponseHeader responseHeader,
final Document document, final Condenser condenser, final DigestURL referrerURL, final String language, final Document document, final Condenser condenser, final DigestURL referrerURL, final String language,
final IndexCell<CitationReference> citations,
final WebgraphConfiguration webgraph, final String sourceName) { final WebgraphConfiguration webgraph, final String sourceName) {
// we use the SolrCell design as index schema // we use the SolrCell design as index schema
SolrVector doc = new SolrVector(); SolrVector doc = new SolrVector();
@ -353,7 +354,7 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
String us = digestURL.toNormalform(true); String us = digestURL.toNormalform(true);
int clickdepth = 999; int clickdepth = 999;
if ((allAttr || contains(CollectionSchema.clickdepth_i)) && citations != null) { if ((allAttr || contains(CollectionSchema.clickdepth_i))) {
if (digestURL.probablyRootURL()) { if (digestURL.probablyRootURL()) {
clickdepth = 0; clickdepth = 0;
} else { } else {
@ -818,15 +819,17 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
// create a subgraph // create a subgraph
if (!containsCanonical) { if (!containsCanonical) {
// a document with canonical tag should not get a webgraph relation, because that belongs to the canonical document // a document with canonical tag should not get a webgraph relation, because that belongs to the canonical document
webgraph.addEdges(subgraph, digestURL, responseHeader, collections, clickdepth, images, true, document.getAnchors(), citations, sourceName); webgraph.addEdges(subgraph, digestURL, responseHeader, collections, clickdepth, images, true, document.getAnchors(), sourceName);
} }
// list all links // list all links
doc.webgraphDocuments.addAll(subgraph.edges); doc.webgraphDocuments.addAll(subgraph.edges);
if (allAttr || contains(CollectionSchema.inboundlinks_protocol_sxt)) add(doc, CollectionSchema.inboundlinks_protocol_sxt, protocolList2indexedList(subgraph.urlProtocols[0])); if (allAttr || contains(CollectionSchema.inboundlinks_protocol_sxt)) add(doc, CollectionSchema.inboundlinks_protocol_sxt, protocolList2indexedList(subgraph.urlProtocols[0]));
if (allAttr || contains(CollectionSchema.inboundlinks_urlstub_sxt)) add(doc, CollectionSchema.inboundlinks_urlstub_sxt, subgraph.urlStubs[0]); if (allAttr || contains(CollectionSchema.inboundlinks_urlstub_sxt)) add(doc, CollectionSchema.inboundlinks_urlstub_sxt, subgraph.urlStubs[0]);
if (allAttr || contains(CollectionSchema.inboundlinks_anchortext_txt)) add(doc, CollectionSchema.inboundlinks_anchortext_txt, subgraph.urlAnchorTexts[0]);
if (allAttr || contains(CollectionSchema.outboundlinks_protocol_sxt)) add(doc, CollectionSchema.outboundlinks_protocol_sxt, protocolList2indexedList(subgraph.urlProtocols[1])); if (allAttr || contains(CollectionSchema.outboundlinks_protocol_sxt)) add(doc, CollectionSchema.outboundlinks_protocol_sxt, protocolList2indexedList(subgraph.urlProtocols[1]));
if (allAttr || contains(CollectionSchema.outboundlinks_urlstub_sxt)) add(doc, CollectionSchema.outboundlinks_urlstub_sxt, subgraph.urlStubs[1]); if (allAttr || contains(CollectionSchema.outboundlinks_urlstub_sxt)) add(doc, CollectionSchema.outboundlinks_urlstub_sxt, subgraph.urlStubs[1]);
if (allAttr || contains(CollectionSchema.outboundlinks_anchortext_txt)) add(doc, CollectionSchema.outboundlinks_anchortext_txt, subgraph.urlAnchorTexts[1]);
// charset // charset
if (allAttr || contains(CollectionSchema.charset_s)) add(doc, CollectionSchema.charset_s, document.getCharset()); if (allAttr || contains(CollectionSchema.charset_s)) add(doc, CollectionSchema.charset_s, document.getCharset());
@ -897,8 +900,40 @@ public class CollectionConfiguration extends SchemaConfiguration implements Seri
CollectionSchema.process_sxt.getSolrFieldName() + ":" + ProcessType.CITATION.toString(), CollectionSchema.process_sxt.getSolrFieldName() + ":" + ProcessType.CITATION.toString(),
10000000, CollectionSchema.host_s.getSolrFieldName()).get(CollectionSchema.host_s.getSolrFieldName()); 10000000, CollectionSchema.host_s.getSolrFieldName()).get(CollectionSchema.host_s.getSolrFieldName());
if (hostscore == null) hostscore = new ClusteredScoreMap<String>(); if (hostscore == null) hostscore = new ClusteredScoreMap<String>();
// for each host, do a citation rank computation
for (String host: hostscore.keyList(true)) { for (String host: hostscore.keyList(true)) {
// Patch the citation index for links with canonical tags.
// This shall fulfill the following requirement:
// If a document A links to B and B contains a 'canonical C', then the citation rank coputation shall consider that A links to C and B does not link to C.
// To do so, we first must collect all canonical links, find all references to them, get the anchor list of the documents and patch the citation reference of these links
BlockingQueue<SolrDocument> documents_with_canonical_tag = collectionConnector.concurrentDocumentsByQuery(
CollectionSchema.host_s.getSolrFieldName() + ":" + host + " AND " + CollectionSchema.canonical_s.getSolrFieldName() + ":[* TO *]",
0, 10000000, 60000L, 50,
CollectionSchema.id.getSolrFieldName(), CollectionSchema.sku.getSolrFieldName(), CollectionSchema.canonical_s.getSolrFieldName());
SolrDocument doc_B;
try {
while ((doc_B = documents_with_canonical_tag.take()) != AbstractSolrConnector.POISON_DOCUMENT) {
// find all documents which link to the canonical doc
DigestURL doc_C_url = new DigestURL((String) doc_B.getFieldValue(CollectionSchema.canonical_s.getSolrFieldName()));
byte[] doc_B_id = ASCII.getBytes(((String) doc_B.getFieldValue(CollectionSchema.id.getSolrFieldName())));
// we remove all references to B, because these become references to C
ReferenceContainer<CitationReference> doc_A_ids = segment.urlCitation().remove(doc_B_id);
if (doc_A_ids == null) {
//System.out.println("*** document with canonical but no referrer: " + doc_B.getFieldValue(CollectionSchema.sku.getSolrFieldName()));
continue; // the document has a canonical tag but no referrer?
}
Iterator<CitationReference> doc_A_ids_iterator = doc_A_ids.entries();
// for each of the referrer A of B, set A as a referrer of C
while (doc_A_ids_iterator.hasNext()) {
CitationReference doc_A_citation = doc_A_ids_iterator.next();
segment.urlCitation().add(doc_C_url.hash(), doc_A_citation);
}
}
} catch (InterruptedException e) {
} catch (SpaceExceededException e) {
}
// do the citation rank computation
if (hostscore.get(host) <= 0) continue; if (hostscore.get(host) <= 0) continue;
// select all documents for each host // select all documents for each host
CRHost crh = new CRHost(segment, rrCache, host, 0.85d, 6); CRHost crh = new CRHost(segment, rrCache, host, 0.85d, 6);

@ -45,7 +45,7 @@ public enum CollectionSchema implements SchemaDeclaration {
exact_signature_unique_b(SolrType.bool, true, true, false, false, false, "flag shows if exact_signature_l is unique at the time of document creation, used for double-check during search"), exact_signature_unique_b(SolrType.bool, true, true, false, false, false, "flag shows if exact_signature_l is unique at the time of document creation, used for double-check during search"),
exact_signature_copycount_i(SolrType.num_integer, true, true, false, false, false, "counter for the number of documents which are not unique (== count of not-unique-flagged documents + 1)"), exact_signature_copycount_i(SolrType.num_integer, true, true, false, false, false, "counter for the number of documents which are not unique (== count of not-unique-flagged documents + 1)"),
fuzzy_signature_l(SolrType.num_long, true, true, false, false, false, "64 bit of the Lookup3Signature from EnhancedTextProfileSignature of text_t"), fuzzy_signature_l(SolrType.num_long, true, true, false, false, false, "64 bit of the Lookup3Signature from EnhancedTextProfileSignature of text_t"),
fuzzy_signature_text_t(SolrType.text_general, true, true, false, false, false, "intermediate data produced in EnhancedTextProfileSignature: a list of word frequencies"), fuzzy_signature_text_t(SolrType.text_general, true, true, false, false, true, "intermediate data produced in EnhancedTextProfileSignature: a list of word frequencies"),
fuzzy_signature_unique_b(SolrType.bool, true, true, false, false, false, "flag shows if fuzzy_signature_l is unique at the time of document creation, used for double-check during search"), fuzzy_signature_unique_b(SolrType.bool, true, true, false, false, false, "flag shows if fuzzy_signature_l is unique at the time of document creation, used for double-check during search"),
fuzzy_signature_copycount_i(SolrType.num_integer, true, true, false, false, false, "counter for the number of documents which are not unique (== count of not-unique-flagged documents + 1)"), fuzzy_signature_copycount_i(SolrType.num_integer, true, true, false, false, false, "counter for the number of documents which are not unique (== count of not-unique-flagged documents + 1)"),
size_i(SolrType.num_integer, true, true, false, false, false, "the size of the raw source"),// int size(); size_i(SolrType.num_integer, true, true, false, false, false, "the size of the raw source"),// int size();
@ -119,9 +119,11 @@ public enum CollectionSchema implements SchemaDeclaration {
robots_i(SolrType.num_integer, true, true, false, false, false, "content of <meta name=\"robots\" content=#content#> tag and the \"X-Robots-Tag\" HTTP property"), robots_i(SolrType.num_integer, true, true, false, false, false, "content of <meta name=\"robots\" content=#content#> tag and the \"X-Robots-Tag\" HTTP property"),
metagenerator_t(SolrType.text_general, true, true, false, false, false, "content of <meta name=\"generator\" content=#content#> tag"), metagenerator_t(SolrType.text_general, true, true, false, false, false, "content of <meta name=\"generator\" content=#content#> tag"),
inboundlinks_protocol_sxt(SolrType.string, true, true, true, false, false, "internal links, only the protocol"), inboundlinks_protocol_sxt(SolrType.string, true, true, true, false, false, "internal links, only the protocol"),
inboundlinks_urlstub_sxt(SolrType.string, true, true, true, false, false, "internal links, the url only without the protocol"), inboundlinks_urlstub_sxt(SolrType.string, true, true, true, false, true, "internal links, the url only without the protocol"),
inboundlinks_anchortext_txt(SolrType.text_general, true, true, true, false, true, "internal links, the visible anchor text"),
outboundlinks_protocol_sxt(SolrType.string, true, true, true, false, false, "external links, only the protocol"), outboundlinks_protocol_sxt(SolrType.string, true, true, true, false, false, "external links, only the protocol"),
outboundlinks_urlstub_sxt(SolrType.string, true, true, true, false, false, "external links, the url only without the protocol"), outboundlinks_urlstub_sxt(SolrType.string, true, true, true, false, true, "external links, the url only without the protocol"),
outboundlinks_anchortext_txt(SolrType.text_general, true, true, true, false, true, "external links, the visible anchor text"),
images_text_t(SolrType.text_general, true, true, false, false, true, "all text/words appearing in image alt texts or the tokenized url"), images_text_t(SolrType.text_general, true, true, false, false, true, "all text/words appearing in image alt texts or the tokenized url"),
images_urlstub_sxt(SolrType.string, true, true, true, false, true, "all image links without the protocol and '://'"), images_urlstub_sxt(SolrType.string, true, true, true, false, true, "all image links without the protocol and '://'"),
@ -156,8 +158,9 @@ public enum CollectionSchema implements SchemaDeclaration {
publisher_url_s(SolrType.string, true, true, false, false, false, "publisher url as defined in http://support.google.com/plus/answer/1713826?hl=de"), publisher_url_s(SolrType.string, true, true, false, false, false, "publisher url as defined in http://support.google.com/plus/answer/1713826?hl=de"),
url_protocol_s(SolrType.string, true, true, false, false, false, "the protocol of the url"), url_protocol_s(SolrType.string, true, true, false, false, false, "the protocol of the url"),
url_file_name_s(SolrType.string, true, true, false, false, false, "the file name (which is the string after the last '/' and before the query part from '?' on) without the file extension"), url_file_name_s(SolrType.string, true, true, false, false, true, "the file name (which is the string after the last '/' and before the query part from '?' on) without the file extension"),
url_file_ext_s(SolrType.string, true, true, false, false, false, "the file name extension"), url_file_name_tokens_t(SolrType.text_general, true, true, false, false, true, "tokens generated from url_file_name_s which can be used for better matching and result boosting"),
url_file_ext_s(SolrType.string, true, true, false, false, true, "the file name extension"),
url_paths_sxt(SolrType.string, true, true, true, false, true, "all path elements in the url hpath (see: http://www.ietf.org/rfc/rfc1738.txt) without the file name"), url_paths_sxt(SolrType.string, true, true, true, false, true, "all path elements in the url hpath (see: http://www.ietf.org/rfc/rfc1738.txt) without the file name"),
url_parameter_i(SolrType.num_integer, true, true, false, false, false, "number of key-value pairs in search part of the url"), url_parameter_i(SolrType.num_integer, true, true, false, false, false, "number of key-value pairs in search part of the url"),
url_parameter_key_sxt(SolrType.string, true, true, true, false, false, "the keys from key-value pairs in the search part of the url"), url_parameter_key_sxt(SolrType.string, true, true, true, false, false, "the keys from key-value pairs in the search part of the url"),
@ -197,6 +200,9 @@ public enum CollectionSchema implements SchemaDeclaration {
cr_host_chance_d(SolrType.num_double, true, true, false, false, false, "the chance to click on this page when randomly clicking on links within on one host"), cr_host_chance_d(SolrType.num_double, true, true, false, false, false, "the chance to click on this page when randomly clicking on links within on one host"),
cr_host_norm_i(SolrType.num_integer, true, true, false, false, false, "normalization of chance: 0 for lower halve of cr_host_count_i urls, 1 for 1/2 of the remaining and so on. the maximum number is 10"), cr_host_norm_i(SolrType.num_integer, true, true, false, false, false, "normalization of chance: 0 for lower halve of cr_host_count_i urls, 1 for 1/2 of the remaining and so on. the maximum number is 10"),
// custom rating; values to influence the ranking in combination with boost rules
rating_i(SolrType.num_integer, true, true, false, false, false, "custom rating; to be set with external rating information"),
// special values; can only be used if '_val' type is defined in schema file; this is not standard // special values; can only be used if '_val' type is defined in schema file; this is not standard
bold_val(SolrType.num_integer, true, true, true, false, false, "number of occurrences of texts in bold_txt"), bold_val(SolrType.num_integer, true, true, true, false, false, "number of occurrences of texts in bold_txt"),
italic_val(SolrType.num_integer, true, true, true, false, false, "number of occurrences of texts in italic_txt"), italic_val(SolrType.num_integer, true, true, true, false, false, "number of occurrences of texts in italic_txt"),
@ -214,7 +220,7 @@ public enum CollectionSchema implements SchemaDeclaration {
ext_title_txt(SolrType.text_general, true, true, true, false, false, "names matching title expressions"), ext_title_txt(SolrType.text_general, true, true, true, false, false, "names matching title expressions"),
ext_title_val(SolrType.num_integer, true, true, true, false, false, "number of matching title expressions"); ext_title_val(SolrType.num_integer, true, true, true, false, false, "number of matching title expressions");
public final static String CORE_NAME = "collection1"; public final static String CORE_NAME = "collection1"; // this was the default core name up to Solr 4.4.0. This default name was stored in CoreContainer.DEFAULT_DEFAULT_CORE_NAME but was removed in Solr 4.5.0
public final static String VOCABULARY_PREFIX = "vocabulary_"; public final static String VOCABULARY_PREFIX = "vocabulary_";
public final static String VOCABULARY_SUFFIX = "_sxt"; public final static String VOCABULARY_SUFFIX = "_sxt";

@ -55,8 +55,6 @@ import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.util.CommonPattern; import net.yacy.cora.util.CommonPattern;
import net.yacy.cora.util.ConcurrentLog; import net.yacy.cora.util.ConcurrentLog;
import net.yacy.document.parser.html.ImageEntry; import net.yacy.document.parser.html.ImageEntry;
import net.yacy.kelondro.data.citation.CitationReference;
import net.yacy.kelondro.rwi.IndexCell;
import net.yacy.search.index.Segment; import net.yacy.search.index.Segment;
public class WebgraphConfiguration extends SchemaConfiguration implements Serializable { public class WebgraphConfiguration extends SchemaConfiguration implements Serializable {
@ -103,12 +101,13 @@ public class WebgraphConfiguration extends SchemaConfiguration implements Serial
} }
public static class Subgraph { public static class Subgraph {
public final ArrayList<String>[] urlProtocols, urlStubs; public final ArrayList<String>[] urlProtocols, urlStubs, urlAnchorTexts;
public final ArrayList<SolrInputDocument> edges; public final ArrayList<SolrInputDocument> edges;
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public Subgraph(int inboundSize, int outboundSize) { public Subgraph(int inboundSize, int outboundSize) {
this.urlProtocols = new ArrayList[]{new ArrayList<String>(inboundSize), new ArrayList<String>(outboundSize)}; this.urlProtocols = new ArrayList[]{new ArrayList<String>(inboundSize), new ArrayList<String>(outboundSize)};
this.urlStubs = new ArrayList[]{new ArrayList<String>(inboundSize), new ArrayList<String>(outboundSize)}; this.urlStubs = new ArrayList[]{new ArrayList<String>(inboundSize), new ArrayList<String>(outboundSize)};
this.urlAnchorTexts = new ArrayList[]{new ArrayList<String>(inboundSize), new ArrayList<String>(outboundSize)};
this.edges = new ArrayList<SolrInputDocument>(inboundSize + outboundSize); this.edges = new ArrayList<SolrInputDocument>(inboundSize + outboundSize);
} }
} }
@ -117,7 +116,7 @@ public class WebgraphConfiguration extends SchemaConfiguration implements Serial
final Subgraph subgraph, final Subgraph subgraph,
final DigestURL source, final ResponseHeader responseHeader, Map<String, Pattern> collections, int clickdepth_source, final DigestURL source, final ResponseHeader responseHeader, Map<String, Pattern> collections, int clickdepth_source,
final List<ImageEntry> images, final boolean inbound, final Collection<AnchorURL> links, final List<ImageEntry> images, final boolean inbound, final Collection<AnchorURL> links,
final IndexCell<CitationReference> citations, final String sourceName) { final String sourceName) {
boolean allAttr = this.isEmpty(); boolean allAttr = this.isEmpty();
int target_order = 0; int target_order = 0;
boolean generalNofollow = responseHeader.get("X-Robots-Tag", "").indexOf("nofollow") >= 0; boolean generalNofollow = responseHeader.get("X-Robots-Tag", "").indexOf("nofollow") >= 0;
@ -228,8 +227,9 @@ public class WebgraphConfiguration extends SchemaConfiguration implements Serial
final String target_url_string = target_url.toNormalform(false); final String target_url_string = target_url.toNormalform(false);
int pr_target = target_url_string.indexOf("://",0); int pr_target = target_url_string.indexOf("://",0);
subgraph.urlProtocols[ioidx].add(target_url_string.substring(0, pr_target)); subgraph.urlProtocols[ioidx].add(target_url_string.substring(0, pr_target));
if (allAttr || contains(WebgraphSchema.target_protocol_s)) add(edge, WebgraphSchema.target_protocol_s, target_url_string.substring(0, pr_target));
subgraph.urlStubs[ioidx].add(target_url_string.substring(pr_target + 3)); subgraph.urlStubs[ioidx].add(target_url_string.substring(pr_target + 3));
subgraph.urlAnchorTexts[ioidx].add(text);
if (allAttr || contains(WebgraphSchema.target_protocol_s)) add(edge, WebgraphSchema.target_protocol_s, target_url_string.substring(0, pr_target));
if (allAttr || contains(WebgraphSchema.target_urlstub_s)) add(edge, WebgraphSchema.target_urlstub_s, target_url_string.substring(pr_target + 3)); if (allAttr || contains(WebgraphSchema.target_urlstub_s)) add(edge, WebgraphSchema.target_urlstub_s, target_url_string.substring(pr_target + 3));
Map<String, String> target_searchpart = target_url.getSearchpartMap(); Map<String, String> target_searchpart = target_url.getSearchpartMap();
if (target_searchpart == null) { if (target_searchpart == null) {
@ -268,7 +268,7 @@ public class WebgraphConfiguration extends SchemaConfiguration implements Serial
} }
if (this.contains(WebgraphSchema.target_protocol_s) && this.contains(WebgraphSchema.target_urlstub_s) && this.contains(WebgraphSchema.target_id_s)) { if (this.contains(WebgraphSchema.target_protocol_s) && this.contains(WebgraphSchema.target_urlstub_s) && this.contains(WebgraphSchema.target_id_s)) {
if ((allAttr || contains(WebgraphSchema.target_clickdepth_i)) && citations != null) { if ((allAttr || contains(WebgraphSchema.target_clickdepth_i))) {
if (target_url.probablyRootURL()) { if (target_url.probablyRootURL()) {
boolean lc = this.lazy; this.lazy = false; boolean lc = this.lazy; this.lazy = false;
add(edge, WebgraphSchema.target_clickdepth_i, 0); add(edge, WebgraphSchema.target_clickdepth_i, 0);

@ -1303,6 +1303,7 @@ public final class HTTPDProxyHandler {
(sslSocket.isBound()) && (sslSocket.isBound()) &&
(!(sslSocket.isClosed())) && (!(sslSocket.isClosed())) &&
(sslSocket.isConnected()) && (sslSocket.isConnected()) &&
(!sslSocket.isInputShutdown() && !sslSocket.isOutputShutdown()) &&
((cs.isAlive()) || (sc.isAlive()))) { ((cs.isAlive()) || (sc.isAlive()))) {
// idle // idle
try {Thread.sleep(1000);} catch (final InterruptedException e) {} // wait a while try {Thread.sleep(1000);} catch (final InterruptedException e) {} // wait a while
@ -1314,6 +1315,7 @@ public final class HTTPDProxyHandler {
cs.interrupt(); cs.interrupt();
sc.interrupt(); sc.interrupt();
// ...hope they have terminated... // ...hope they have terminated...
if (sslSocket != null) sslSocket.close();
} }
public static class Mediate extends Thread { public static class Mediate extends Thread {

Loading…
Cancel
Save